// darknet/src/tests.c

#include "connected_layer.h"
#include "convolutional_layer.h"
#include "maxpool_layer.h"
#include "network.h"
#include "image.h"
#include "parser.h"
2013-11-13 22:50:38 +04:00
#include "data.h"
#include "matrix.h"
2013-12-03 04:41:40 +04:00
#include "utils.h"
2013-11-04 23:11:01 +04:00
#include <time.h>
#include <stdlib.h>
#include <stdio.h>
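
// test_convolve: times 1000 convolutions of a random 3x3 kernel over dog.jpg
// and displays the resulting single-channel output.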
void test_convolve()
{
    image dog = load_image("dog.jpg");
    printf("dog channels %d\n", dog.c);
    image kernel = make_random_image(3,3,dog.c);
    image edge = make_image(dog.h, dog.w, 1);
    int i;
    clock_t start = clock(), end;
    for(i = 0; i < 1000; ++i){
        convolve(dog, kernel, 1, 0, edge, 1);
    }
    end = clock();
    printf("Convolutions: %lf seconds\n", (double)(end-start)/CLOCKS_PER_SEC);
    show_image_layers(edge, "Test Convolve");
}

void test_color()
{
    image dog = load_image("test_color.png");
    show_image_layers(dog, "Test Color");
}
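
// test_convolutional_layer: runs dog.jpg through one convolutional layer
// (3 kernels, size 3, stride 1, RELU) followed by a 2x2 maxpool layer,
// displaying the kernels and the pooled output.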
void test_convolutional_layer()
{
    srand(0);
    image dog = load_image("dog.jpg");
    int i;
    int n = 3;
    int stride = 1;
    int size = 3;
    convolutional_layer layer = *make_convolutional_layer(dog.h, dog.w, dog.c, n, size, stride, RELU);
    char buff[256];
    for(i = 0; i < n; ++i) {
        sprintf(buff, "Kernel %d", i);
        show_image(layer.kernels[i], buff);
    }
    forward_convolutional_layer(layer, dog.data);

    image output = get_convolutional_image(layer);
    maxpool_layer mlayer = *make_maxpool_layer(output.h, output.w, output.c, 2);
    forward_maxpool_layer(mlayer, layer.output);

    show_image_layers(get_maxpool_image(mlayer), "Test Maxpool Layer");
}
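
// verify_convolutional_layer: gradient check for the convolutional layer.
// Builds one Jacobian by finite differences (perturb each input by eps and
// re-run the forward pass) and another from the backward pass (set a single
// output delta to 1 and read back the input deltas), then prints and
// displays both so they can be compared.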
void verify_convolutional_layer()
{
    srand(0);
    int i;
    int n = 1;
    int stride = 1;
    int size = 3;
    double eps = .00000001;
    image test = make_random_image(5,5, 1);
    convolutional_layer layer = *make_convolutional_layer(test.h,test.w,test.c, n, size, stride, RELU);
    image out = get_convolutional_image(layer);
    double **jacobian = calloc(test.h*test.w*test.c, sizeof(double *));
    forward_convolutional_layer(layer, test.data);
    image base = copy_image(out);

    for(i = 0; i < test.h*test.w*test.c; ++i){
        test.data[i] += eps;
        forward_convolutional_layer(layer, test.data);
        image partial = copy_image(out);
        subtract_image(partial, base);
        scale_image(partial, 1/eps);
        jacobian[i] = partial.data;
        test.data[i] -= eps;
    }
    double **jacobian2 = calloc(out.h*out.w*out.c, sizeof(double *));
    image in_delta = make_image(test.h, test.w, test.c);
    image out_delta = get_convolutional_delta(layer);
    for(i = 0; i < out.h*out.w*out.c; ++i){
        out_delta.data[i] = 1;
        backward_convolutional_layer(layer, test.data, in_delta.data);
        image partial = copy_image(in_delta);
        jacobian2[i] = partial.data;
        out_delta.data[i] = 0;
    }
    int j;
    double *j1 = calloc(test.h*test.w*test.c*out.h*out.w*out.c, sizeof(double));
    double *j2 = calloc(test.h*test.w*test.c*out.h*out.w*out.c, sizeof(double));
    for(i = 0; i < test.h*test.w*test.c; ++i){
        for(j = 0; j < out.h*out.w*out.c; ++j){
            j1[i*out.h*out.w*out.c + j] = jacobian[i][j];
            j2[i*out.h*out.w*out.c + j] = jacobian2[j][i];
            printf("%f %f\n", jacobian[i][j], jacobian2[j][i]);
        }
    }
    image mj1 = double_to_image(test.w*test.h*test.c, out.w*out.h*out.c, 1, j1);
    image mj2 = double_to_image(test.w*test.h*test.c, out.w*out.h*out.c, 1, j2);
    printf("%f %f\n", avg_image_layer(mj1,0), avg_image_layer(mj2,0));
    show_image(mj1, "forward jacobian");
    show_image(mj2, "backward jacobian");
}

void test_load()
{
    image dog = load_image("dog.jpg");
    show_image(dog, "Test Load");
    show_image_layers(dog, "Test Load");
}
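
// test_upsample: upsamples dog.jpg by a factor of 3 and displays the result.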
void test_upsample()
{
    image dog = load_image("dog.jpg");
    int n = 3;
    image up = make_image(n*dog.h, n*dog.w, dog.c);
    upsample_image(dog, n, up);
    show_image(up, "Test Upsample");
    show_image_layers(up, "Test Upsample");
}

void test_rotate()
{
    int i;
    image dog = load_image("dog.jpg");
    clock_t start = clock(), end;
    for(i = 0; i < 1001; ++i){
        rotate_image(dog);
    }
    end = clock();
    printf("Rotations: %lf seconds\n", (double)(end-start)/CLOCKS_PER_SEC);
    show_image(dog, "Test Rotate");

    image random = make_random_image(3,3,3);
    show_image(random, "Test Rotate Random");
    rotate_image(random);
    show_image(random, "Test Rotate Random");
    rotate_image(random);
    show_image(random, "Test Rotate Random");
}
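
// test_parser: trains the network described by test_parser.cfg to fit
// y = x*x on random inputs in [0,1), printing a running average of the
// squared error every million updates.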
void test_parser()
{
    network net = parse_network_cfg("test_parser.cfg");
    double input[1];
    int count = 0;
    double avgerr = 0;
    while(++count < 100000000){
        double v = ((double)rand()/RAND_MAX);
        double truth = v*v;
        input[0] = v;
        forward_network(net, input);
        double *out = get_network_output(net);
        double *delta = get_network_delta(net);
        double err = pow((out[0]-truth),2.);
        avgerr = .99 * avgerr + .01 * err;
        if(count % 1000000 == 0) printf("%f %f :%f AVG %f \n", truth, out[0], err, avgerr);
        delta[0] = truth - out[0];
        backward_network(net, input, &truth);
        update_network(net, .001, 0, 0);
    }
}

void test_data()
{
    char *labels[] = {"cat","dog"};
    data train = load_data_image_pathfile_random("train_paths.txt", 101, labels, 2);
    free_data(train);
}
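
// test_full: repeatedly loads random batches of 1000 cat/dog image paths
// from train_paths.txt and trains the network described by full.cfg.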
void test_full()
{
    network net = parse_network_cfg("full.cfg");
    srand(0);
    int i = 0;
    char *labels[] = {"cat","dog"};
    while(i++ < 1000 || 1){
        data train = load_data_image_pathfile_random("train_paths.txt", 1000, labels, 2);
        train_network(net, train, .0005, 0, 0);
        free_data(train);
        printf("Round %d\n", i);
    }
}
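
// test_nist: trains the network described by nist.cfg on the MNIST CSV
// files and reports training and test accuracy.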
void test_nist()
{
    srand(444444);
    network net = parse_network_cfg("nist.cfg");
    data train = load_categorical_data_csv("mnist/mnist_train.csv", 0, 10);
    data test = load_categorical_data_csv("mnist/mnist_test.csv", 0, 10);
    normalize_data_rows(train);
    normalize_data_rows(test);
    randomize_data(train);
    int count = 0;
    double lr = .0005;
    while(++count <= 1){
        double acc = train_network_sgd(net, train, 10000, lr, .9, .001);
        printf("Training Accuracy: %lf\n", acc);
        lr /= 2;
    }
    double train_acc = network_accuracy(net, train);
    fprintf(stderr, "\nTRAIN: %f\n", train_acc);
    double test_acc = network_accuracy(net, test);
    fprintf(stderr, "TEST: %f\n\n", test_acc);
    printf("%d, %f, %f\n", count, train_acc, test_acc);
    //end = clock();
    //printf("Neural Net Learning: %lf seconds\n", (double)(end-start)/CLOCKS_PER_SEC);
}
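
// test_ensemble: trains 30 independent networks from nist.cfg on MNIST,
// sums their prediction matrices on the test set, and reports the accuracy
// of each model and of the running ensemble.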
void test_ensemble()
{
    int i;
    srand(888888);
    data d = load_categorical_data_csv("mnist/mnist_train.csv", 0, 10);
    normalize_data_rows(d);
    randomize_data(d);
    data test = load_categorical_data_csv("mnist/mnist_test.csv", 0, 10);
    normalize_data_rows(test);
    data train = d;
    /*
    data *split = split_data(d, 1, 10);
    data train = split[0];
    data test = split[1];
    */
    matrix prediction = make_matrix(test.y.rows, test.y.cols);
    int n = 30;
    for(i = 0; i < n; ++i){
        int count = 0;
        double lr = .0005;
        network net = parse_network_cfg("nist.cfg");
        while(++count <= 5){
            double acc = train_network_sgd(net, train, train.X.rows, lr, .9, .001);
            printf("Training Accuracy: %lf\n", acc);
            lr /= 2;
        }
        matrix partial = network_predict_data(net, test);
        double acc = matrix_accuracy(test.y, partial);
        printf("Model Accuracy: %lf\n", acc);
        matrix_add_matrix(partial, prediction);
        acc = matrix_accuracy(test.y, prediction);
        printf("Current Ensemble Accuracy: %lf\n", acc);
        free_matrix(partial);
    }
    double acc = matrix_accuracy(test.y, prediction);
    printf("Full Ensemble Accuracy: %lf\n", acc);
}
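
// test_kernel_update: runs one learning step of a single 3x3 linear
// convolutional kernel on a hand-made input and prints the kernel, the
// input delta, and the kernel update.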
void test_kernel_update()
{
    srand(0);
    double delta[] = {.1};
    double input[] = {.3, .5, .3, .5, .5, .5, .5, .0, .5};
    double kernel[] = {1,2,3,4,5,6,7,8,9};
    convolutional_layer layer = *make_convolutional_layer(3, 3, 1, 1, 3, 1, LINEAR);
    layer.kernels[0].data = kernel;
    layer.delta = delta;
    learn_convolutional_layer(layer, input);
    print_image(layer.kernels[0]);
    print_image(get_convolutional_delta(layer));
    print_image(layer.kernel_updates[0]);
}
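
// test_random_classify: runs a fully connected network (connected.cfg) over
// randomly sampled rows of train.csv for 300 epochs, then writes thresholded
// 0/1 predictions for test.csv to submission/out.txt.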
void test_random_classify()
{
    network net = parse_network_cfg("connected.cfg");
    matrix m = csv_to_matrix("train.csv");
    //matrix ho = hold_out_matrix(&m, 2500);
    double *truth = pop_column(&m, 0);
    //double *ho_truth = pop_column(&ho, 0);
    int i;
    clock_t start = clock(), end;
    int count = 0;
    while(++count <= 300){
        for(i = 0; i < m.rows; ++i){
            int index = rand()%m.rows;
            //image p = double_to_image(1690,1,1,m.vals[index]);
            //normalize_image(p);
            forward_network(net, m.vals[index]);
            double *out = get_network_output(net);
            double *delta = get_network_delta(net);
            //printf("%f\n", out[0]);
            delta[0] = truth[index] - out[0];
            //printf("%f\n", delta[0]);
            //printf("%f %f\n", truth[index], out[0]);
            //backward_network(net, m.vals[index], );
            update_network(net, .00001, 0, 0);
        }
        //double test_acc = error_network(net, m, truth);
        //double valid_acc = error_network(net, ho, ho_truth);
        //printf("%f, %f\n", test_acc, valid_acc);
        //fprintf(stderr, "%5d: %f Valid: %f\n", count, test_acc, valid_acc);
        //if(valid_acc > .70) break;
    }
    end = clock();
    FILE *fp = fopen("submission/out.txt", "w");
    matrix test = csv_to_matrix("test.csv");
    truth = pop_column(&test, 0);
    for(i = 0; i < test.rows; ++i){
        forward_network(net, test.vals[i]);
        double *out = get_network_output(net);
        if(fabs(out[0]) < .5) fprintf(fp, "0\n");
        else fprintf(fp, "1\n");
    }
    fclose(fp);
    printf("Neural Net Learning: %lf seconds\n", (double)(end-start)/CLOCKS_PER_SEC);
}
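
// test_split: splits the MNIST training data with split_data and prints the
// row counts of the original set and both parts.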
void test_split()
{
    data train = load_categorical_data_csv("mnist/mnist_train.csv", 0, 10);
    data *split = split_data(train, 0, 13);
    printf("%d, %d, %d\n", train.X.rows, split[0].X.rows, split[1].X.rows);
}
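
// main: the test to run is selected by commenting calls in and out;
// currently test_ensemble() is active.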
int main()
{
    //test_kernel_update();
    //test_split();
    test_ensemble();
    //test_nist();
    //test_full();
    //test_random_preprocess();
    //test_random_classify();
    //test_parser();
    //test_backpropagate();
    //test_ann();
    //test_convolve();
    //test_upsample();
    //test_rotate();
    //test_load();
    //test_network();
    //test_convolutional_layer();
    //verify_convolutional_layer();
    //test_color();
    //cvWaitKey(0);
    return 0;
}