99 problems

Author: Joseph Redmon
Date: 2014-12-18 15:46:45 -08:00
parent 19d3ae7267
commit f88baf4a3a
8 changed files with 34 additions and 13 deletions

@@ -27,7 +27,7 @@ LDFLAGS+= -lOpenCL
 endif
 endif
 CFLAGS= $(COMMON) $(OPTS)
-#CFLAGS= $(COMMON) -O0 -g
+CFLAGS= $(COMMON) -O0 -g
 LDFLAGS+=`pkg-config --libs opencv` -lm -pthread
 VPATH=./src/
 EXEC=cnn

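Note on the Makefile change: make keeps only the last assignment to a variable,
so the newly uncommented line silently overrides the optimized $(OPTS) flags,
and every binary built from this revision is unoptimized with debug symbols
(-O0 -g). That fits a debugging commit, but it is easy to forget to flip back.
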
@@ -380,22 +380,24 @@ void test_nist(char *path)
 void train_nist(char *cfgfile)
 {
-    srand(222222);
+    srand(time(0));
+    network net = parse_network_cfg(cfgfile);
+    // srand(time(0));
     data train = load_categorical_data_csv("data/mnist/mnist_train.csv", 0, 10);
     data test = load_categorical_data_csv("data/mnist/mnist_test.csv",0,10);
-    normalize_data_rows(train);
-    normalize_data_rows(test);
-    network net = parse_network_cfg(cfgfile);
     int count = 0;
     int iters = 60000/net.batch + 1;
     while(++count <= 10){
         clock_t start = clock(), end;
+        normalize_data_rows(train);
+        normalize_data_rows(test);
         float loss = train_network_sgd(net, train, iters);
-        end = clock();
         float test_acc = 0;
-        //if(count%1 == 0) test_acc = network_accuracy(net, test);
+        if(count%1 == 0) test_acc = network_accuracy(net, test);
+        end = clock();
         printf("%d: Loss: %f, Test Acc: %f, Time: %lf seconds\n", count, loss, test_acc,(float)(end-start)/CLOCKS_PER_SEC);
     }
     free_data(train);
     free_data(test);
     char buff[256];
     sprintf(buff, "%s.trained", cfgfile);
+    save_network(net, buff);

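Notes on train_nist: the run is now seeded from the clock instead of the fixed
222222, the network is parsed before the data load, test accuracy is evaluated
every epoch (the //if is uncommented), end = clock() moves below that call so
the printed time includes evaluation, and the result is saved to
<cfgfile>.trained. Since iters = 60000/net.batch + 1, one while-iteration is
roughly one pass over MNIST's 60,000 training rows: batch=100 gives 601 batches,
i.e. 60,100 examples. Also note that normalize_data_rows now runs inside the
loop, re-normalizing the rows at the start of every epoch.
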
@@ -10,9 +10,11 @@ dropout_layer *make_dropout_layer(int batch, int inputs, float probability)
     layer->probability = probability;
     layer->inputs = inputs;
     layer->batch = batch;
+    layer->output = calloc(inputs*batch, sizeof(float));
     layer->rand = calloc(inputs*batch, sizeof(float));
     layer->scale = 1./(1.-probability);
     #ifdef GPU
+    layer->output_cl = cl_make_array(layer->output, inputs*batch);
     layer->rand_cl = cl_make_array(layer->rand, inputs*batch);
     #endif
     return layer;
@@ -24,14 +26,15 @@ void forward_dropout_layer(dropout_layer layer, float *input)
     for(i = 0; i < layer.batch * layer.inputs; ++i){
         float r = rand_uniform();
         layer.rand[i] = r;
-        if(r < layer.probability) input[i] = 0;
-        else input[i] *= layer.scale;
+        if(r < layer.probability) layer.output[i] = 0;
+        else layer.output[i] = input[i]*layer.scale;
     }
 }

 void backward_dropout_layer(dropout_layer layer, float *delta)
 {
     int i;
+    if(!delta) return;
     for(i = 0; i < layer.batch * layer.inputs; ++i){
         float r = layer.rand[i];
         if(r < layer.probability) delta[i] = 0;
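
The heart of the commit: forward_dropout_layer used to zero and rescale input
in place, destroying the previous layer's activations; it now writes into the
layer's own output buffer, and backward_dropout_layer gains a null guard for
the case where there is no upstream delta. The scale = 1./(1.-probability)
factor is inverted dropout: survivors are boosted at train time so the expected
activation is unchanged (with probability .5, half the units are zeroed and the
rest doubled), which is why the layer can simply be skipped at test time. A
minimal self-contained sketch of the scheme, with rand_uniform and the layer
struct reduced to plain arguments (an illustration, not darknet's code):

    #include <stdlib.h>

    /* Inverted dropout: each unit survives with probability (1 - prob);
       survivors are scaled by 1/(1 - prob), so E[output[i]] == input[i]. */
    void dropout_forward(const float *input, float *output, float *mask,
                         int n, float prob)
    {
        float scale = 1.f/(1.f - prob);
        int i;
        for(i = 0; i < n; ++i){
            float r = (float)rand()/RAND_MAX;  /* stand-in for rand_uniform() */
            mask[i] = r;                       /* saved for the backward pass */
            output[i] = (r < prob) ? 0 : input[i]*scale;
        }
    }

    /* Backward: reuse the saved mask so exactly the same units are dropped. */
    void dropout_backward(const float *mask, float *delta, int n, float prob)
    {
        int i;
        if(!delta) return;
        for(i = 0; i < n; ++i){
            if(mask[i] < prob) delta[i] = 0;
        }
    }

As in the diff, the backward pass here only zeroes the dropped deltas; it does
not multiply the survivors by scale, even though the forward pass does.
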
@@ -66,6 +69,7 @@ void forward_dropout_layer_gpu(dropout_layer layer, cl_mem input)
     cl.error = clSetKernelArg(kernel, i++, sizeof(layer.rand_cl), (void*) &layer.rand_cl);
     cl.error = clSetKernelArg(kernel, i++, sizeof(layer.probability), (void*) &layer.probability);
     cl.error = clSetKernelArg(kernel, i++, sizeof(layer.scale), (void*) &layer.scale);
+    cl.error = clSetKernelArg(kernel, i++, sizeof(layer.output_cl), (void*) &layer.output_cl);
     check_error(cl);

     const size_t global_size[] = {size};
@@ -86,6 +90,7 @@ void backward_dropout_layer_gpu(dropout_layer layer, cl_mem delta)
     cl.error = clSetKernelArg(kernel, i++, sizeof(layer.rand_cl), (void*) &layer.rand_cl);
     cl.error = clSetKernelArg(kernel, i++, sizeof(layer.probability), (void*) &layer.probability);
     cl.error = clSetKernelArg(kernel, i++, sizeof(layer.scale), (void*) &layer.scale);
+    cl.error = clSetKernelArg(kernel, i++, sizeof(delta), (void*) &delta);
     check_error(cl);

     const size_t global_size[] = {size};

@@ -1,5 +1,5 @@
-__kernel void yoloswag420blazeit360noscope(__global float *input, __global float *rand, float prob, float scale)
+__kernel void yoloswag420blazeit360noscope(__global float *input, __global float *rand, float prob, float scale, __global float *output)
 {
     int id = get_global_id(0);
-    input[id] = (rand[id] < prob) ? 0 : input[id]*scale;
+    output[id] = (rand[id] < prob) ? 0 : input[id]*scale;
 }
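
The kernel gains a fifth parameter, __global float *output, and writes there
instead of clobbering input. OpenCL kernel arguments are positional, so the
host must bind the new slot in exactly this order: forward_dropout_layer_gpu
binds layer.output_cl, while backward_dropout_layer_gpu rebinds the same slot
to delta, reusing the one kernel so the masked result lands back in the delta
buffer (presumably with delta also bound as the input slot, which would scale
surviving deltas on the GPU where the CPU path does not). A rough sketch of the
launch, assuming darknet-style helpers (cl, kernel, check_error and the buffer
handles come from the surrounding code and are not defined here):

    cl_uint i = 0;
    cl.error = clSetKernelArg(kernel, i++, sizeof(input_cl), (void*) &input_cl);
    cl.error = clSetKernelArg(kernel, i++, sizeof(rand_cl), (void*) &rand_cl);
    cl.error = clSetKernelArg(kernel, i++, sizeof(prob), (void*) &prob);
    cl.error = clSetKernelArg(kernel, i++, sizeof(scale), (void*) &scale);
    cl.error = clSetKernelArg(kernel, i++, sizeof(output_cl), (void*) &output_cl);
    check_error(cl);

    const size_t global_size[] = {n};  /* one work-item per activation */
    cl.error = clEnqueueNDRangeKernel(cl.queue, kernel, 1, 0, global_size,
                                      0, 0, 0, 0);
    check_error(cl);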

@@ -8,8 +8,10 @@ typedef struct{
     float probability;
     float scale;
     float *rand;
+    float *output;
     #ifdef GPU
     cl_mem rand_cl;
+    cl_mem output_cl;
     #endif
 } dropout_layer;

@@ -74,6 +74,7 @@ void forward_network(network net, float *input, float *truth, int train)
             if(!train) continue;
             dropout_layer layer = *(dropout_layer *)net.layers[i];
             forward_dropout_layer(layer, input);
+            input = layer.output;
         }
         else if(net.types[i] == FREEWEIGHT){
             if(!train) continue;
@@ -119,7 +120,8 @@ float *get_network_output_layer(network net, int i)
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.output;
     } else if(net.types[i] == DROPOUT){
-        return get_network_output_layer(net, i-1);
+        dropout_layer layer = *(dropout_layer *)net.layers[i];
+        return layer.output;
     } else if(net.types[i] == FREEWEIGHT){
         return get_network_output_layer(net, i-1);
     } else if(net.types[i] == CONNECTED){
@@ -153,6 +155,7 @@ float *get_network_delta_layer(network net, int i)
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.delta;
     } else if(net.types[i] == DROPOUT){
+        if(i == 0) return 0;
         return get_network_delta_layer(net, i-1);
     } else if(net.types[i] == FREEWEIGHT){
         return get_network_delta_layer(net, i-1);

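The one-line input = layer.output; is what makes the new buffer take effect:
forward_network threads a running input pointer through the layers, so each
layer must hand the next one its own output. While dropout wrote in place the
code could fall through without rebinding; now the pointer has to be
redirected, and get_network_output_layer can return the dropout layer's real
buffer instead of recursing to layer i-1. Delta lookup still delegates to the
previous layer, so the new if(i == 0) return 0; guard keeps a first-position
dropout layer from recursing to index -1. The pattern, reduced to a sketch
(other layer types elided):

    /* Sketch of forward_network's dispatch: run a layer, then rebind the
       running pointer so the next layer consumes this layer's output. */
    float *x = input;
    int i;
    for(i = 0; i < net.n; ++i){
        if(net.types[i] == DROPOUT){
            if(!train) continue;         /* identity at test time */
            dropout_layer layer = *(dropout_layer *)net.layers[i];
            forward_dropout_layer(layer, x);
            x = layer.output;            /* downstream sees the masked copy */
        }
        /* ... other layer types follow the same produce-then-rebind shape ... */
    }

The GPU changes below mirror this with layer.output_cl.
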
@@ -52,6 +52,7 @@ void forward_network_gpu(network net, cl_mem input, cl_mem truth, int train)
             if(!train) continue;
             dropout_layer layer = *(dropout_layer *)net.layers[i];
             forward_dropout_layer_gpu(layer, input);
+            input = layer.output_cl;
         }
         else if(net.types[i] == CROP){
             crop_layer layer = *(crop_layer *)net.layers[i];
@@ -138,7 +139,8 @@ cl_mem get_network_output_cl_layer(network net, int i)
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.output_cl;
     } else if(net.types[i] == DROPOUT){
-        return get_network_output_cl_layer(net, i-1);
+        dropout_layer layer = *(dropout_layer *)net.layers[i];
+        return layer.output_cl;
     }
     return 0;
 }
@@ -161,6 +163,7 @@ cl_mem get_network_delta_cl_layer(network net, int i)
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.delta_cl;
     } else if(net.types[i] == DROPOUT){
+        if(i == 0) return 0;
         return get_network_delta_cl_layer(net, i-1);
     }
     return 0;

@@ -245,6 +245,12 @@ dropout_layer *parse_dropout(list *options, network *net, int count)
     if(count == 0){
         net->batch = option_find_int(options, "batch",1);
         input = option_find_int(options, "input",1);
+        float learning_rate = option_find_float(options, "learning_rate", .001);
+        float momentum = option_find_float(options, "momentum", .9);
+        float decay = option_find_float(options, "decay", .0001);
+        net->learning_rate = learning_rate;
+        net->momentum = momentum;
+        net->decay = decay;
     }else{
         input = get_network_output_size_layer(*net, count-1);
     }
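
In darknet's cfg format the first parsed section has to establish the
network-wide training hyperparameters; parse_dropout previously read only
batch and input for count == 0, so a net that began with a dropout section
came up without learning_rate, momentum, or decay. This brings it in line with
the other parse_* functions. A hypothetical cfg fragment exercising that path
(key names mirror the option_find_* calls above; the section name and the
probability key are assumed from elsewhere in the parser, not from this hunk):

    [dropout]
    batch=100
    input=784
    learning_rate=.001
    momentum=.9
    decay=.0001
    probability=.5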