Network resize is fixed

AlexeyAB
2019-01-18 23:30:10 +03:00
parent 0e022d0912
commit 6e99e852ff
2 changed files with 25 additions and 4 deletions

View File

@@ -178,8 +178,13 @@ void train_detector(char *datacfg, char *cfgfile, char *weightfile, int *gpus, i
             //int dim_h = (random_val + (init_h / 32 - 5)) * 32; // +-160
             float random_val = rand_scale(1.4);    // *x or /x
-            int dim_w = roundl(random_val*init_w / 32) * 32;
-            int dim_h = roundl(random_val*init_h / 32) * 32;
+            int dim_w = roundl(random_val*init_w / 32 + 1) * 32;
+            int dim_h = roundl(random_val*init_h / 32 + 1) * 32;
+            if (get_current_batch(net) == 0) {
+                dim_w = roundl(1.4*init_w / 32 + 1) * 32;
+                dim_h = roundl(1.4*init_h / 32 + 1) * 32;
+            }
             if (dim_w < 32) dim_w = 32;
             if (dim_h < 32) dim_h = 32;
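
In the hunk above, the "+ 1" inside roundl() biases each randomly picked training dimension upward by one 32-pixel step, and the new get_current_batch(net) == 0 branch presumably pins the very first batch to the largest (1.4x) size so the network buffers get allocated at their maximum right away. A minimal sketch (not code from this commit), assuming init_w = 416 and three sample values that rand_scale(1.4) could return, printing the old and new widths side by side:

    /* Sketch only: compares the old and new rounding for an assumed
     * init_w of 416 and three sample rand_scale(1.4) outputs
     * (1/1.4, 1.0, 1.4). */
    #include <math.h>
    #include <stdio.h>

    int main(void)
    {
        int init_w = 416;                           /* assumed network width */
        float vals[] = { 1.0f / 1.4f, 1.0f, 1.4f }; /* sample rand_scale(1.4) outputs */
        for (int i = 0; i < 3; ++i) {
            float random_val = vals[i];
            int old_w = roundl(random_val * init_w / 32) * 32;
            int new_w = roundl(random_val * init_w / 32 + 1) * 32;
            printf("random_val=%.3f  old=%d  new=%d\n", random_val, old_w, new_w);
        }
        return 0;
    }

Built with gcc and -lm, this prints 288/320, 416/448 and 576/608: for each sample value the new formula lands one 32-pixel step above the old one.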

View File

@@ -77,10 +77,26 @@ void resize_yolo_layer(layer *l, int w, int h)
     l->outputs = h*w*l->n*(l->classes + 4 + 1);
     l->inputs = l->outputs;
-    l->output = realloc(l->output, l->batch*l->outputs*sizeof(float));
-    l->delta = realloc(l->delta, l->batch*l->outputs*sizeof(float));
+    if (!l->output_pinned) l->output = realloc(l->output, l->batch*l->outputs * sizeof(float));
+    if (!l->delta_pinned) l->delta = realloc(l->delta, l->batch*l->outputs*sizeof(float));
 
 #ifdef GPU
+    if (l->output_pinned) {
+        cudaFreeHost(l->output);
+        if (cudaSuccess != cudaHostAlloc(&l->output, l->batch*l->outputs * sizeof(float), cudaHostRegisterMapped)) {
+            l->output = realloc(l->output, l->batch*l->outputs * sizeof(float));
+            l->output_pinned = 0;
+        }
+    }
+
+    if (l->delta_pinned) {
+        cudaFreeHost(l->delta);
+        if (cudaSuccess != cudaHostAlloc(&l->delta, l->batch*l->outputs * sizeof(float), cudaHostRegisterMapped)) {
+            l->delta = realloc(l->delta, l->batch*l->outputs * sizeof(float));
+            l->delta_pinned = 0;
+        }
+    }
+
     cuda_free(l->delta_gpu);
     cuda_free(l->output_gpu);
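
In this second hunk, l->output and l->delta may live in CUDA pinned (page-locked) host memory when the output_pinned / delta_pinned flags are set. Pinned memory cannot be grown with realloc(), so the resize frees it with cudaFreeHost() and requests a new block with cudaHostAlloc(); if that fails, the code falls back to ordinary pageable memory and clears the flag. A minimal self-contained sketch of that pattern, built around a hypothetical resize_host_buffer() helper rather than the actual layer fields:

    /* Sketch only: generic "resize a possibly pinned host buffer" pattern.
     * resize_host_buffer() is a hypothetical helper, not darknet code. */
    #include <stdlib.h>
    #include <cuda_runtime.h>

    static void resize_host_buffer(float **buf, int *pinned, size_t new_elems)
    {
        if (!*pinned) {
            /* pageable memory: a plain realloc is enough */
            *buf = (float *)realloc(*buf, new_elems * sizeof(float));
            return;
        }
        /* pinned memory cannot be realloc'ed: free it and pin a new block */
        cudaFreeHost(*buf);
        *buf = NULL;
        if (cudaSuccess != cudaHostAlloc((void **)buf, new_elems * sizeof(float),
                                         cudaHostAllocMapped)) {
            /* pinning failed: fall back to a fresh pageable buffer */
            *buf = (float *)calloc(new_elems, sizeof(float));
            *pinned = 0;
        }
    }

The fallback in the sketch allocates a fresh pageable buffer rather than calling realloc() on the pointer that was just handed to cudaFreeHost(), since that pointer is no longer valid; clearing *pinned mirrors the l->output_pinned = 0 and l->delta_pinned = 0 assignments in the diff.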