mirror of https://github.com/pjreddie/darknet.git (synced 2023-08-10 21:13:14 +03:00)

commit 47528e37cf: crop layer scaling and trans on cpu
parent d97331b88f
@@ -1,6 +1,7 @@
-#include "cuda.h"
 #ifndef ACTIVATIONS_H
 #define ACTIVATIONS_H
+#include "cuda.h"
+#include "math.h"
 
 typedef enum{
     LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE
@@ -38,15 +38,6 @@ cost_layer *make_cost_layer(int batch, int inputs, COST_TYPE type)
     return layer;
 }
 
-void pull_cost_layer(cost_layer layer)
-{
-    cuda_pull_array(layer.delta_gpu, layer.delta, layer.batch*layer.inputs);
-}
-void push_cost_layer(cost_layer layer)
-{
-    cuda_push_array(layer.delta_gpu, layer.delta, layer.batch*layer.inputs);
-}
-
 void forward_cost_layer(cost_layer layer, network_state state)
 {
     if (!state.truth) return;
@@ -63,6 +54,16 @@ void backward_cost_layer(const cost_layer layer, network_state state)
 
 #ifdef GPU
 
+void pull_cost_layer(cost_layer layer)
+{
+    cuda_pull_array(layer.delta_gpu, layer.delta, layer.batch*layer.inputs);
+}
+
+void push_cost_layer(cost_layer layer)
+{
+    cuda_push_array(layer.delta_gpu, layer.delta, layer.batch*layer.inputs);
+}
+
 void forward_cost_layer_gpu(cost_layer layer, network_state state)
 {
     if (!state.truth) return;
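
Note on the two cost-layer hunks above: pull_cost_layer and push_cost_layer only wrap cuda_pull_array and cuda_push_array, and the commit relocates them from the unconditional part of the file into the #ifdef GPU section, presumably so a CPU-only build never references CUDA-only symbols. A condensed sketch of the guard these hunks rely on, illustrative only (the matching #endif lives elsewhere in the file):

#ifdef GPU
/* device<->host transfer helper: only meaningful when built with GPU support */
void pull_cost_layer(cost_layer layer)
{
    /* copy the layer's delta buffer from device memory back to the host copy */
    cuda_pull_array(layer.delta_gpu, layer.delta, layer.batch*layer.inputs);
}
#endif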
@@ -37,6 +37,8 @@ void forward_crop_layer(const crop_layer layer, network_state state)
     int flip = (layer.flip && rand()%2);
     int dh = rand()%(layer.h - layer.crop_height + 1);
     int dw = rand()%(layer.w - layer.crop_width + 1);
+    float scale = 2;
+    float trans = -1;
     if(!state.train){
         flip = 0;
         dh = (layer.h - layer.crop_height)/2;
@@ -53,7 +55,7 @@ void forward_crop_layer(const crop_layer layer, network_state state)
                     }
                     row = i + dh;
                     index = col+layer.w*(row+layer.h*(c + layer.c*b));
-                    layer.output[count++] = state.input[index];
+                    layer.output[count++] = state.input[index]*scale + trans;
                 }
             }
         }
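
These crop-layer hunks are the substance of the commit: the CPU forward pass now applies the affine remap x*scale + trans with scale = 2 and trans = -1, which, given the commit message, presumably mirrors the scaling and translation already done on the GPU path. Assuming input pixels arrive normalized to [0,1], the remap lands them in [-1,1]. A small standalone sketch of that mapping (the helper name remap_pixel is hypothetical, not from darknet):

#include <stdio.h>

/* hypothetical helper, not part of darknet: the affine remap the CPU crop
 * layer now applies to every cropped pixel (output = input*scale + trans) */
static float remap_pixel(float x, float scale, float trans)
{
    return x*scale + trans;
}

int main(void)
{
    float scale = 2, trans = -1;          /* the values added in the hunk above */
    float samples[] = {0.0f, 0.5f, 1.0f}; /* pixels assumed normalized to [0,1] */
    int i;
    for(i = 0; i < 3; ++i){
        /* prints -1.00, 0.00, 1.00: inputs in [0,1] land in [-1,1] */
        printf("%.2f -> %.2f\n", samples[i], remap_pixel(samples[i], scale, trans));
    }
    return 0;
}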
@@ -203,8 +203,6 @@ void test_detection(char *cfgfile, char *weightfile)
         fgets(filename, 256, stdin);
         strtok(filename, "\n");
         image im = load_image_color(filename, im_size, im_size);
-        translate_image(im, -128);
-        scale_image(im, 1/128.);
         printf("%d %d %d\n", im.h, im.w, im.c);
         float *X = im.data;
         time=clock();
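
The last hunk drops per-image preprocessing from test_detection: translate_image(im, -128) followed by scale_image(im, 1/128.) computes (x - 128)/128, which centers 0..255 pixel data around zero, roughly into [-1, 1). With the crop layer now scaling and translating on the CPU, keeping that pair would presumably normalize the input twice, so it is removed. A short arithmetic check of the old mapping (standalone sketch, not darknet code):

#include <stdio.h>

int main(void)
{
    /* the removed test_detection preprocessing: translate by -128, scale by 1/128 */
    float pixels[] = {0, 128, 255};
    int i;
    for(i = 0; i < 3; ++i){
        float x = (pixels[i] - 128) * (1/128.);
        printf("%.0f -> %.3f\n", pixels[i], x); /* -1.000, 0.000, 0.992 */
    }
    return 0;
}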