Mirror of https://github.com/pjreddie/darknet.git, synced 2023-08-10 21:13:14 +03:00.

Commit 0cbfa46461 ("stuff"), parent e7688a05a1.

Makefile (2 lines changed)
@@ -25,7 +25,7 @@ CFLAGS+=-DGPU
 LDFLAGS+= -L/usr/local/cuda/lib64 -lcuda -lcudart -lcublas -lcurand
 endif

-OBJ=gemm.o utils.o cuda.o deconvolutional_layer.o convolutional_layer.o list.o image.o activations.o im2col.o col2im.o blas.o crop_layer.o dropout_layer.o maxpool_layer.o softmax_layer.o data.o matrix.o network.o connected_layer.o cost_layer.o normalization_layer.o parser.o option_list.o darknet.o detection_layer.o imagenet.o captcha.o detection.o
+OBJ=gemm.o utils.o cuda.o deconvolutional_layer.o convolutional_layer.o list.o image.o activations.o im2col.o col2im.o blas.o crop_layer.o dropout_layer.o maxpool_layer.o softmax_layer.o data.o matrix.o network.o connected_layer.o cost_layer.o normalization_layer.o parser.o option_list.o darknet.o detection_layer.o imagenet.o captcha.o detection.o route_layer.o
 ifeq ($(GPU), 1)
 OBJ+=convolutional_kernels.o deconvolutional_kernels.o activation_kernels.o im2col_kernels.o col2im_kernels.o blas_kernels.o crop_layer_kernels.o dropout_layer_kernels.o maxpool_layer_kernels.o softmax_layer_kernels.o network_kernels.o
 endif
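The bulk of this commit wires a new route layer type through the build, the network dispatch code, and the cfg parser. route_layer.c and route_layer.h themselves are not part of the diff; only the new object file above and the call sites below appear. A minimal sketch of the header the rest of the commit seems to rely on, with struct fields and prototypes inferred from those call sites and everything else an assumption, not the actual file:

/* Sketch of a route_layer.h interface implied by this commit's call sites.
 * Field and function names below appear in the diff; their exact layout
 * and semantics are guesses. */
#ifndef ROUTE_LAYER_H
#define ROUTE_LAYER_H
#include "network.h"

typedef struct {
    int batch;           /* set_batch_network() writes layer->batch          */
    int n;               /* number of input layers to concatenate            */
    int *input_layers;   /* indices of those layers (layer.input_layers[0])  */
    int *input_sizes;    /* per-layer output size, built in parse_route()    */
    int outputs;         /* total output size per batch element              */
    float *output;
    float *delta;
    float *output_gpu;   /* used only by the GPU build                       */
    float *delta_gpu;
} route_layer;

route_layer *make_route_layer(int batch, int n, int *input_layers, int *input_sizes);
void forward_route_layer(const route_layer layer, network net);
void backward_route_layer(const route_layer layer, network net);
#ifdef GPU
void forward_route_layer_gpu(const route_layer layer, network net);
void backward_route_layer_gpu(const route_layer layer, network net);
#endif

#endif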
@@ -167,8 +167,10 @@ void fill_truth_detection(char *path, float *truth, int classes, int num_boxes,
         h = constrain(0, 1, h);
         if (w == 0 || h == 0) continue;
         if(1){
-            w = sqrt(w);
-            h = sqrt(h);
+            //w = sqrt(w);
+            //h = sqrt(h);
+            w = pow(w, 1./2.);
+            h = pow(h, 1./2.);
         }

         int index = (i+j*num_boxes)*(4+classes+background);
@@ -308,8 +308,8 @@ void predict_detections(network net, data d, float threshold, int offset, int cl
             float y = (pred.vals[j][ci + 1] + row)/num_boxes;
             float w = pred.vals[j][ci + 2]; //* distance_from_edge(row, num_boxes);
             float h = pred.vals[j][ci + 3]; //* distance_from_edge(col, num_boxes);
-            w = w*w;
-            h = h*h;
+            w = pow(w, 2);
+            h = pow(h, 2);
             float prob = scale*pred.vals[j][k+class+background+nuisance];
             if(prob < threshold) continue;
             printf("%d %d %f %f %f %f %f\n", offset + j, class, prob, x, y, w, h);
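Taken together with the fill_truth_detection hunk above, these two changes keep the square-root encoding of box width and height: the ground truth stores sqrt(w) and sqrt(h), and predictions are squared back at detection time, which makes absolute errors on small boxes count relatively more. The new pow() calls are numerically the same as the lines they replace. A tiny standalone check, purely illustrative and not part of the commit (compile with -lm):

/* Check that the rewritten lines keep the same encode/decode round trip:
 * pow(x, 1./2.) matches sqrt(x), and squaring undoes it. */
#include <assert.h>
#include <math.h>
#include <stdio.h>

int main(void)
{
    float w = 0.3f, h = 0.07f;
    float we = pow(w, 1./2.), he = pow(h, 1./2.);   /* as in fill_truth_detection */
    assert(fabs(we - sqrt(w)) < 1e-6);
    assert(fabs(he - sqrt(h)) < 1e-6);
    float wd = pow(we, 2), hd = pow(he, 2);         /* as in predict_detections   */
    printf("%f %f\n", wd, hd);                      /* ~0.300000 0.070000         */
    return 0;
}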
@@ -4,7 +4,6 @@
 #include "image.h"
 #include "data.h"
 #include "utils.h"
-#include "params.h"

 #include "crop_layer.h"
 #include "connected_layer.h"
@@ -16,6 +15,7 @@
 #include "normalization_layer.h"
 #include "softmax_layer.h"
 #include "dropout_layer.h"
+#include "route_layer.h"

 char *get_layer_string(LAYER_TYPE a)
 {
@@ -40,6 +40,8 @@ char *get_layer_string(LAYER_TYPE a)
         return "crop";
     case COST:
         return "cost";
+    case ROUTE:
+        return "route";
     default:
         break;
     }
@@ -99,6 +101,9 @@ void forward_network(network net, network_state state)
         else if(net.types[i] == DROPOUT){
             forward_dropout_layer(*(dropout_layer *)net.layers[i], state);
         }
+        else if(net.types[i] == ROUTE){
+            forward_route_layer(*(route_layer *)net.layers[i], net);
+        }
         state.input = get_network_output_layer(net, i);
     }
 }
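forward_route_layer itself is not in this diff. Given the fields used elsewhere in the commit (input_layers, outputs, output, batch), a route layer is presumably a concatenation of the listed layers' outputs into one buffer. A standalone toy sketch of that behaviour, with every name an assumption rather than darknet's implementation (the backward counterpart is sketched after the backward_network hunk below):

/* Illustrative sketch of a route/concat forward pass: copy each input's
 * per-image block into one concatenated buffer. Toy code, not darknet. */
#include <stdio.h>
#include <string.h>

/* inputs[i] holds sizes[i] floats per batch element; out holds total per element */
void route_forward(float **inputs, int *sizes, int n, int batch,
                   float *out, int total)
{
    int i, b, offset = 0;
    for(i = 0; i < n; ++i){
        for(b = 0; b < batch; ++b){
            memcpy(out + b*total + offset, inputs[i] + b*sizes[i],
                   sizes[i]*sizeof(float));
        }
        offset += sizes[i];
    }
}

int main(void)
{
    int i;
    float a[] = {1, 2, 3};          /* output of one routed layer, say   */
    float b[] = {7, 8};             /* output of another routed layer    */
    float *in[] = {a, b};
    int sizes[] = {3, 2};
    float out[5];
    route_forward(in, sizes, 2, 1, out, 5);
    for(i = 0; i < 5; ++i) printf("%g ", out[i]);   /* 1 2 3 7 8 */
    printf("\n");
    return 0;
}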
@@ -143,6 +148,8 @@ float *get_network_output_layer(network net, int i)
         return ((crop_layer *)net.layers[i]) -> output;
     } else if(net.types[i] == NORMALIZATION){
         return ((normalization_layer *)net.layers[i]) -> output;
+    } else if(net.types[i] == ROUTE){
+        return ((route_layer *)net.layers[i]) -> output;
     }
     return 0;
 }
@@ -177,6 +184,8 @@ float *get_network_delta_layer(network net, int i)
     } else if(net.types[i] == CONNECTED){
         connected_layer layer = *(connected_layer *)net.layers[i];
         return layer.delta;
+    } else if(net.types[i] == ROUTE){
+        return ((route_layer *)net.layers[i]) -> delta;
     }
     return 0;
 }
@@ -247,10 +256,12 @@ void backward_network(network net, network_state state)
         else if(net.types[i] == CONNECTED){
             connected_layer layer = *(connected_layer *)net.layers[i];
             backward_connected_layer(layer, state);
-        }
-        else if(net.types[i] == COST){
+        } else if(net.types[i] == COST){
             cost_layer layer = *(cost_layer *)net.layers[i];
             backward_cost_layer(layer, state);
+        } else if(net.types[i] == ROUTE){
+            route_layer layer = *(route_layer *)net.layers[i];
+            backward_route_layer(layer, net);
         }
     }
 }
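backward_route_layer is likewise not included in the diff. By symmetry with the forward sketch above, it would split the route layer's delta back into per-input slices and accumulate each slice onto the matching input layer's delta. Continuing the toy example from the forward sketch (names assumed, not darknet's code):

/* Reverse of route_forward: add each slice of the concatenated delta back
 * onto the matching input layer's delta buffer. */
void route_backward(float **input_deltas, int *sizes, int n, int batch,
                    const float *delta, int total)
{
    int i, b, j, offset = 0;
    for(i = 0; i < n; ++i){
        for(b = 0; b < batch; ++b){
            for(j = 0; j < sizes[i]; ++j){
                /* += rather than =, so gradients from later layers accumulate */
                input_deltas[i][b*sizes[i] + j] += delta[b*total + offset + j];
            }
        }
        offset += sizes[i];
    }
}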
@@ -369,6 +380,10 @@ void set_batch_network(network *net, int b)
             crop_layer *layer = (crop_layer *)net->layers[i];
             layer->batch = b;
         }
+        else if(net->types[i] == ROUTE){
+            route_layer *layer = (route_layer *)net->layers[i];
+            layer->batch = b;
+        }
     }
 }

@@ -445,12 +460,17 @@ int get_network_output_size_layer(network net, int i)
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.inputs;
     }
+    else if(net.types[i] == ROUTE){
+        route_layer layer = *(route_layer *)net.layers[i];
+        return layer.outputs;
+    }
     fprintf(stderr, "Can't find output size\n");
     return 0;
 }

 int resize_network(network net, int h, int w, int c)
 {
+    fprintf(stderr, "Might be broken, careful!!");
     int i;
     for (i = 0; i < net.n; ++i){
         if(net.types[i] == CONVOLUTIONAL){
@@ -540,6 +560,10 @@ image get_network_image_layer(network net, int i)
         crop_layer layer = *(crop_layer *)net.layers[i];
         return get_crop_image(layer);
     }
+    else if(net.types[i] == ROUTE){
+        route_layer layer = *(route_layer *)net.layers[i];
+        return get_network_image_layer(net, layer.input_layers[0]);
+    }
     return make_empty_image(0,0,0);
 }

@@ -4,7 +4,6 @@

 #include "image.h"
 #include "detection_layer.h"
-#include "params.h"
 #include "data.h"

 typedef enum {
@@ -17,6 +16,7 @@ typedef enum {
     NORMALIZATION,
     DROPOUT,
     CROP,
+    ROUTE,
     COST
 } LAYER_TYPE;

@@ -18,11 +18,12 @@ extern "C" {
 #include "normalization_layer.h"
 #include "softmax_layer.h"
 #include "dropout_layer.h"
+#include "route_layer.h"
 }

 float * get_network_output_gpu_layer(network net, int i);
 float * get_network_delta_gpu_layer(network net, int i);
-float *get_network_output_gpu(network net);
+float * get_network_output_gpu(network net);

 void forward_network_gpu(network net, network_state state)
 {
@@ -55,6 +56,9 @@ void forward_network_gpu(network net, network_state state)
         else if(net.types[i] == CROP){
             forward_crop_layer_gpu(*(crop_layer *)net.layers[i], state);
         }
+        else if(net.types[i] == ROUTE){
+            forward_route_layer_gpu(*(route_layer *)net.layers[i], net);
+        }
         state.input = get_network_output_gpu_layer(net, i);
     }
 }
@@ -96,6 +100,9 @@ void backward_network_gpu(network net, network_state state)
         else if(net.types[i] == SOFTMAX){
             backward_softmax_layer_gpu(*(softmax_layer *)net.layers[i], state);
         }
+        else if(net.types[i] == ROUTE){
+            backward_route_layer_gpu(*(route_layer *)net.layers[i], net);
+        }
     }
 }

@@ -142,6 +149,9 @@ float * get_network_output_gpu_layer(network net, int i)
     else if(net.types[i] == SOFTMAX){
         return ((softmax_layer *)net.layers[i]) -> output_gpu;
     }
+    else if(net.types[i] == ROUTE){
+        return ((route_layer *)net.layers[i]) -> output_gpu;
+    }
     else if(net.types[i] == DROPOUT){
         return get_network_output_gpu_layer(net, i-1);
     }
@@ -170,6 +180,10 @@ float * get_network_delta_gpu_layer(network net, int i)
         maxpool_layer layer = *(maxpool_layer *)net.layers[i];
         return layer.delta_gpu;
     }
+    else if(net.types[i] == ROUTE){
+        route_layer layer = *(route_layer *)net.layers[i];
+        return layer.delta_gpu;
+    }
     else if(net.types[i] == SOFTMAX){
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.delta_gpu;
src/parser.c (36 lines changed)
@@ -14,6 +14,7 @@
 #include "softmax_layer.h"
 #include "dropout_layer.h"
 #include "detection_layer.h"
+#include "route_layer.h"
 #include "list.h"
 #include "option_list.h"
 #include "utils.h"
@@ -34,6 +35,7 @@ int is_crop(section *s);
 int is_cost(section *s);
 int is_detection(section *s);
 int is_normalization(section *s);
+int is_route(section *s);
 list *read_cfg(char *filename);

 void free_section(section *s)
@@ -246,6 +248,32 @@ normalization_layer *parse_normalization(list *options, size_params params)
     return layer;
 }

+route_layer *parse_route(list *options, size_params params, network net)
+{
+    char *l = option_find(options, "layers");
+    int len = strlen(l);
+    if(!l) error("Route Layer must specify input layers");
+    int n = 1;
+    int i;
+    for(i = 0; i < len; ++i){
+        if (l[i] == ',') ++n;
+    }
+
+    int *layers = calloc(n, sizeof(int));
+    int *sizes = calloc(n, sizeof(int));
+    for(i = 0; i < n; ++i){
+        int index = atoi(l);
+        l = strchr(l, ',')+1;
+        layers[i] = index;
+        sizes[i] = get_network_output_size_layer(net, index);
+    }
+    int batch = params.batch;
+
+    route_layer *layer = make_route_layer(batch, n, layers, sizes);
+    option_unused(options);
+    return layer;
+}
+
 void parse_net_options(list *options, network *net)
 {
     net->batch = option_find_int(options, "batch",1);
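The section this parser handles is an ordinary darknet cfg block. A hypothetical example (the layer indices 4 and 9 are made up) that the comma-counting loop above would split into two entries:

[route]
layers = 4, 9

Each index is read with atoi() and resolved to an output size via get_network_output_size_layer(), so the listed layers must appear earlier in the cfg than the [route] section. Note also that, as committed, strlen(l) runs before the if(!l) check, so a [route] section with no layers option would crash on the strlen call rather than reach the error message.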
@@ -326,6 +354,10 @@ network parse_network_cfg(char *filename)
         normalization_layer *layer = parse_normalization(options, params);
         net.types[count] = NORMALIZATION;
         net.layers[count] = layer;
+    }else if(is_route(s)){
+        route_layer *layer = parse_route(options, params, net);
+        net.types[count] = ROUTE;
+        net.layers[count] = layer;
     }else if(is_dropout(s)){
         dropout_layer *layer = parse_dropout(options, params);
         net.types[count] = DROPOUT;
@@ -402,6 +434,10 @@ int is_normalization(section *s)
     return (strcmp(s->type, "[lrnorm]")==0
             || strcmp(s->type, "[localresponsenormalization]")==0);
 }
+int is_route(section *s)
+{
+    return (strcmp(s->type, "[route]")==0);
+}

 int read_option(char *s, list *options)
 {