Joseph Redmon 2013-12-07 09:38:50 -08:00
parent 4bdf96bd6a
commit 8c3694bc91
8 changed files with 109 additions and 33 deletions

View File

@@ -1,12 +1,12 @@
 CC=gcc
 COMMON=-Wall `pkg-config --cflags opencv`
-CFLAGS= $(COMMON) -O3 -ffast-math -flto
 UNAME = $(shell uname)
 ifeq ($(UNAME), Darwin)
 COMMON += -isystem /usr/local/Cellar/opencv/2.4.6.1/include/opencv -isystem /usr/local/Cellar/opencv/2.4.6.1/include
 else
-COMMON += -march=native
+CFLAGS += -march=native
 endif
+CFLAGS= $(COMMON) -O3 -ffast-math -flto
 #CFLAGS= $(COMMON) -O0 -g
 LDFLAGS=`pkg-config --libs opencv` -lm
 VPATH=./src/

View File

@@ -141,7 +141,7 @@ void normalize_data_rows(data d)
     }
 }
 
-data *cv_split_data(data d, int part, int total)
+data *split_data(data d, int part, int total)
 {
     data *split = calloc(2, sizeof(data));
     int i;
@@ -155,6 +155,12 @@ data *cv_split_data(data d, int part, int total)
     train.X.rows = train.y.rows = d.X.rows - (end-start);
     train.X.cols = test.X.cols = d.X.cols;
     train.y.cols = test.y.cols = d.y.cols;
+
+    train.X.vals = calloc(train.X.rows, sizeof(double*));
+    test.X.vals = calloc(test.X.rows, sizeof(double*));
+    train.y.vals = calloc(train.y.rows, sizeof(double*));
+    test.y.vals = calloc(test.y.rows, sizeof(double*));
+
     for(i = 0; i < start; ++i){
         train.X.vals[i] = d.X.vals[i];
         train.y.vals[i] = d.y.vals[i];
@@ -164,8 +170,8 @@ data *cv_split_data(data d, int part, int total)
         test.y.vals[i-start] = d.y.vals[i];
     }
     for(i = end; i < d.X.rows; ++i){
-        train.X.vals[i-(start-end)] = d.X.vals[i];
-        train.y.vals[i-(start-end)] = d.y.vals[i];
+        train.X.vals[i-(end-start)] = d.X.vals[i];
+        train.y.vals[i-(end-start)] = d.y.vals[i];
     }
     split[0] = train;
     split[1] = test;
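
For reference, a minimal sketch of how the renamed split_data might be called for a simple hold-out split (the fold index, fold count, and helper name below are illustrative, not part of this commit):

#include <stdio.h>
#include <stdlib.h>
#include "data.h"

/* Hypothetical usage: hold out fold 0 of 10 as a test set.
   split_data returns a 2-element array: split[0] is the training
   portion, split[1] the held-out portion; row pointers are shared
   with the original data, only the pointer arrays are new. */
void example_holdout(data d)
{
    data *split = split_data(d, 0, 10);
    data train = split[0];
    data test = split[1];
    printf("%d train rows, %d test rows\n", train.X.rows, test.X.rows);
    free(split);   /* frees only the outer array in this sketch */
}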

View File

@@ -19,6 +19,6 @@ data load_data_image_pathfile_random(char *filename, int n, char **labels, int k
 data load_categorical_data_csv(char *filename, int target, int k);
 void normalize_data_rows(data d);
 void randomize_data(data d);
-data *cv_split_data(data d, int part, int total);
+data *split_data(data d, int part, int total);
 
 #endif

View File

@@ -13,6 +13,18 @@ void free_matrix(matrix m)
     free(m.vals);
 }
 
+double matrix_accuracy(matrix truth, matrix guess)
+{
+    int k = truth.cols;
+    int i;
+    int count = 0;
+    for(i = 0; i < truth.rows; ++i){
+        int class = max_index(guess.vals[i], k);
+        if(truth.vals[i][class]) ++count;
+    }
+    return (double)count/truth.rows;
+}
+
 void matrix_add_matrix(matrix from, matrix to)
 {
     assert(from.rows == to.rows && from.cols == to.cols);
@@ -26,12 +38,14 @@ void matrix_add_matrix(matrix from, matrix to)
 matrix make_matrix(int rows, int cols)
 {
+    int i;
     matrix m;
     m.rows = rows;
     m.cols = cols;
     m.vals = calloc(m.rows, sizeof(double *));
-    int i;
-    for(i = 0; i < m.rows; ++i) m.vals[i] = calloc(m.cols, sizeof(double));
+    for(i = 0; i < m.rows; ++i){
+        m.vals[i] = calloc(m.cols, sizeof(double));
+    }
     return m;
 }
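
As a quick illustration of the new matrix_accuracy semantics, a small sketch with made-up 2x2 values: each guess row is reduced to its argmax and counted as correct when the one-hot truth row is 1 at that index.

#include <stdio.h>
#include "matrix.h"

/* Two samples, two classes; the first row is predicted correctly,
   the second is not, so the accuracy comes out to 0.5. */
void example_accuracy()
{
    matrix truth = make_matrix(2, 2);
    matrix guess = make_matrix(2, 2);
    truth.vals[0][0] = 1;  guess.vals[0][0] = .9;  guess.vals[0][1] = .1;
    truth.vals[1][1] = 1;  guess.vals[1][0] = .7;  guess.vals[1][1] = .3;
    printf("%f\n", matrix_accuracy(truth, guess));  /* 0.500000 */
    free_matrix(truth);
    free_matrix(guess);
}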

View File

@@ -11,6 +11,8 @@ void print_matrix(matrix m);
 matrix csv_to_matrix(char *filename);
 matrix hold_out_matrix(matrix *m, int n);
+double matrix_accuracy(matrix truth, matrix guess);
+void matrix_add_matrix(matrix from, matrix to);
 double *pop_column(matrix *m, int c);

View File

@@ -174,18 +174,18 @@ int train_network_datum(network net, double *x, double *y, double step, double m
     return (y[class]?1:0);
 }
 
-double train_network_sgd(network net, data d, double step, double momentum,double decay)
+double train_network_sgd(network net, data d, int n, double step, double momentum,double decay)
 {
     int i;
     int correct = 0;
-    for(i = 0; i < d.X.rows; ++i){
+    for(i = 0; i < n; ++i){
         int index = rand()%d.X.rows;
         correct += train_network_datum(net, d.X.vals[index], d.y.vals[index], step, momentum, decay);
-        if((i+1)%10 == 0){
-            printf("%d: %f\n", (i+1), (double)correct/(i+1));
-        }
+        //if((i+1)%10 == 0){
+        //    printf("%d: %f\n", (i+1), (double)correct/(i+1));
+        //}
     }
-    return (double)correct/d.X.rows;
+    return (double)correct/n;
 }
void train_network(network net, data d, double step, double momentum, double decay) void train_network(network net, data d, double step, double momentum, double decay)
@@ -269,6 +269,27 @@ void visualize_network(network net)
     }
 }
+
+double *network_predict(network net, double *input)
+{
+    forward_network(net, input);
+    double *out = get_network_output(net);
+    return out;
+}
+
+matrix network_predict_data(network net, data test)
+{
+    int i,j;
+    int k = get_network_output_size(net);
+    matrix pred = make_matrix(test.X.rows, k);
+    for(i = 0; i < test.X.rows; ++i){
+        double *out = network_predict(net, test.X.vals[i]);
+        for(j = 0; j < k; ++j){
+            pred.vals[i][j] = out[j];
+        }
+    }
+    return pred;
+}
 
 void print_network(network net)
 {
     int i,j;
@@ -306,17 +327,12 @@ void print_network(network net)
         fprintf(stderr, "\n");
     }
 }
 
 double network_accuracy(network net, data d)
 {
-    int i;
-    int correct = 0;
-    int k = get_network_output_size(net);
-    for(i = 0; i < d.X.rows; ++i){
-        forward_network(net, d.X.vals[i]);
-        double *out = get_network_output(net);
-        int guess = max_index(out, k);
-        if(d.y.vals[i][guess]) ++correct;
-    }
-    return (double)correct/d.X.rows;
+    matrix guess = network_predict_data(net, d);
+    double acc = matrix_accuracy(d.y, guess);
+    free_matrix(guess);
+    return acc;
 }
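
To show how the reworked network.c pieces fit together, a rough sketch of a train-then-evaluate step (the sample count and hyperparameters mirror values used elsewhere in this commit; the function name is hypothetical):

#include <stdio.h>
#include "data.h"
#include "matrix.h"
#include "network.h"

/* train_network_sgd now takes an explicit number of random samples n;
   network_predict_data returns one row of class scores per test row. */
void example_epoch(network net, data train, data test)
{
    double train_acc = train_network_sgd(net, train, 10000, .0005, .9, .001);
    matrix pred = network_predict_data(net, test);
    double test_acc = matrix_accuracy(test.y, pred);
    printf("train %f, test %f\n", train_acc, test_acc);
    free_matrix(pred);
}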

View File

@@ -24,8 +24,9 @@ network make_network(int n);
 void forward_network(network net, double *input);
 void backward_network(network net, double *input, double *truth);
 void update_network(network net, double step, double momentum, double decay);
-double train_network_sgd(network net, data d, double step, double momentum,double decay);
+double train_network_sgd(network net, data d, int n, double step, double momentum,double decay);
 void train_network(network net, data d, double step, double momentum, double decay);
+matrix network_predict_data(network net, data test);
 double network_accuracy(network net, data d);
 double *get_network_output(network net);
 double *get_network_output_layer(network net, int i);

View File

@@ -204,21 +204,57 @@ void test_nist()
     int count = 0;
     double lr = .0005;
     while(++count <= 1){
-        double acc = train_network_sgd(net, train, lr, .9, .001);
-        printf("Training Accuracy: %lf", acc);
+        double acc = train_network_sgd(net, train, 10000, lr, .9, .001);
+        printf("Training Accuracy: %lf\n", acc);
         lr /= 2;
     }
+    /*
     double train_acc = network_accuracy(net, train);
     fprintf(stderr, "\nTRAIN: %f\n", train_acc);
     double test_acc = network_accuracy(net, test);
     fprintf(stderr, "TEST: %f\n\n", test_acc);
     printf("%d, %f, %f\n", count, train_acc, test_acc);
+    */
     //end = clock();
     //printf("Neural Net Learning: %lf seconds\n", (double)(end-start)/CLOCKS_PER_SEC);
 }
 
+void test_ensemble()
+{
+    int i;
+    srand(888888);
+    data d = load_categorical_data_csv("mnist/mnist_train.csv", 0, 10);
+    normalize_data_rows(d);
+    randomize_data(d);
+    data test = load_categorical_data_csv("mnist/mnist_test.csv", 0,10);
+    normalize_data_rows(test);
+    data train = d;
+    /*
+    data *split = split_data(d, 1, 10);
+    data train = split[0];
+    data test = split[1];
+    */
+    matrix prediction = make_matrix(test.y.rows, test.y.cols);
+    int n = 30;
+    for(i = 0; i < n; ++i){
+        int count = 0;
+        double lr = .0005;
+        network net = parse_network_cfg("nist.cfg");
+        while(++count <= 5){
+            double acc = train_network_sgd(net, train, train.X.rows, lr, .9, .001);
+            printf("Training Accuracy: %lf\n", acc);
+            lr /= 2;
+        }
+        matrix partial = network_predict_data(net, test);
+        double acc = matrix_accuracy(test.y, partial);
+        printf("Model Accuracy: %lf\n", acc);
+        matrix_add_matrix(partial, prediction);
+        acc = matrix_accuracy(test.y, prediction);
+        printf("Current Ensemble Accuracy: %lf\n", acc);
+        free_matrix(partial);
+    }
+    double acc = matrix_accuracy(test.y, prediction);
+    printf("Full Ensemble Accuracy: %lf\n", acc);
+}
+
 void test_kernel_update()
 {
     srand(0);
@@ -283,7 +319,7 @@ void test_random_classify()
 
 void test_split()
 {
     data train = load_categorical_data_csv("mnist/mnist_train.csv", 0, 10);
-    data *split = cv_split_data(train, 0, 13);
+    data *split = split_data(train, 0, 13);
     printf("%d, %d, %d\n", train.X.rows, split[0].X.rows, split[1].X.rows);
 }
@@ -291,8 +327,9 @@ void test_split()
 int main()
 {
     //test_kernel_update();
-    test_split();
-    // test_nist();
+    //test_split();
+    test_ensemble();
+    //test_nist();
     //test_full();
     //test_random_preprocess();
     //test_random_classify();
@@ -307,6 +344,6 @@ int main()
     //test_convolutional_layer();
     //verify_convolutional_layer();
     //test_color();
-    cvWaitKey(0);
+    //cvWaitKey(0);
     return 0;
 }
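
For context on what test_ensemble is doing numerically: each model's class scores are summed into a running prediction matrix, and the ensemble's vote for a row is the argmax of that sum. A compact sketch of the same reduction, assuming matrix_add_matrix accumulates its first argument into its second as the test code suggests (the helper name is hypothetical):

#include "matrix.h"

/* Combine n_models prediction matrices by element-wise summation,
   then score the combined votes against the one-hot labels. */
double ensemble_accuracy(matrix truth, matrix *preds, int n_models)
{
    int i;
    matrix sum = make_matrix(truth.rows, truth.cols);
    for(i = 0; i < n_models; ++i){
        matrix_add_matrix(preds[i], sum);
    }
    double acc = matrix_accuracy(truth, sum);
    free_matrix(sum);
    return acc;
}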