diff --git a/src/cnn.c b/src/cnn.c
index e587a1b7..10705fd3 100644
--- a/src/cnn.c
+++ b/src/cnn.c
@@ -105,7 +105,7 @@ void train_detection_net(char *cfgfile)
         time=clock();
         float loss = train_network(net, train);
         avg_loss = avg_loss*.9 + loss*.1;
-        printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs*net.batch);
+        printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs);
         if(i%100==0){
             char buff[256];
             sprintf(buff, "/home/pjreddie/imagenet_backup/detnet_%d.cfg", i);
@@ -213,7 +213,7 @@ void train_imagenet(char *cfgfile)
     set_learning_network(&net, net.learning_rate, 0, net.decay);
     printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
     int imgs = 1024;
-    int i = 77700;
+    int i = 0;
     char **labels = get_labels("/home/pjreddie/data/imagenet/cls.labels.list");
     list *plist = get_paths("/data/imagenet/cls.train.list");
     char **paths = (char **)list_to_array(plist);
@@ -240,7 +240,7 @@ void train_imagenet(char *cfgfile)
         free_data(train);
         if(i%100==0){
             char buff[256];
-            sprintf(buff, "/home/pjreddie/imagenet_backup/net_%d.cfg", i);
+            sprintf(buff, "/home/pjreddie/imagenet_backup/alexnet_%d.cfg", i);
             save_network(net, buff);
         }
     }
diff --git a/src/network.c b/src/network.c
index 5c5ce9d2..641d7824 100644
--- a/src/network.c
+++ b/src/network.c
@@ -15,6 +15,35 @@
 #include "softmax_layer.h"
 #include "dropout_layer.h"
 
+char *get_layer_string(LAYER_TYPE a)
+{
+    switch(a){
+        case CONVOLUTIONAL:
+            return "convolutional";
+        case CONNECTED:
+            return "connected";
+        case MAXPOOL:
+            return "maxpool";
+        case SOFTMAX:
+            return "softmax";
+        case NORMALIZATION:
+            return "normalization";
+        case DROPOUT:
+            return "dropout";
+        case FREEWEIGHT:
+            return "freeweight";
+        case CROP:
+            return "crop";
+        case COST:
+            return "cost";
+        default:
+            break;
+    }
+    return "none";
+}
+
+
+
 network make_network(int n, int batch)
 {
     network net;
diff --git a/src/network.h b/src/network.h
index 7a401bdf..c6c77901 100644
--- a/src/network.h
+++ b/src/network.h
@@ -41,6 +41,7 @@ float *network_predict_gpu(network net, float *input);
 #endif
 
 void compare_networks(network n1, network n2, data d);
+char *get_layer_string(LAYER_TYPE a);
 
 network make_network(int n, int batch);
 void forward_network(network net, float *input, float *truth, int train);
diff --git a/src/network_gpu.c b/src/network_gpu.c
index b53d534e..c958056b 100644
--- a/src/network_gpu.c
+++ b/src/network_gpu.c
@@ -24,7 +24,7 @@ void forward_network_gpu(network net, cl_mem input, cl_mem truth, int train)
 {
     int i;
     for(i = 0; i < net.n; ++i){
-        clock_t time = clock();
+        //clock_t time = clock();
         if(net.types[i] == CONVOLUTIONAL){
             convolutional_layer layer = *(convolutional_layer *)net.layers[i];
             forward_convolutional_layer_gpu(layer, input);
@@ -61,7 +61,7 @@ void forward_network_gpu(network net, cl_mem input, cl_mem truth, int train)
             input = layer.output_cl;
         }
         check_error(cl);
-        //printf("Forw %d %f\n", i, sec(clock() - time));
+        //printf("Forward %d %s %f\n", i, get_layer_string(net.types[i]), sec(clock() - time));
     }
 }
 
@@ -71,7 +71,7 @@ void backward_network_gpu(network net, cl_mem input)
     cl_mem prev_input;
     cl_mem prev_delta;
     for(i = net.n-1; i >= 0; --i){
-        clock_t time = clock();
+        //clock_t time = clock();
         if(i == 0){
             prev_input = input;
             prev_delta = 0;
@@ -104,7 +104,7 @@ void backward_network_gpu(network net, cl_mem input)
             backward_softmax_layer_gpu(layer, prev_delta);
         }
         check_error(cl);
-        //printf("Back %d %f\n", i, sec(clock() - time));
+        //printf("Backward %d %s %f\n", i, get_layer_string(net.types[i]), sec(clock() - time));
     }
 }