Convolutional layers working with matrices

This commit is contained in:
Joseph Redmon
2014-01-28 16:28:42 -08:00
parent b2b7137b6f
commit f7a17f82eb
26 changed files with 459 additions and 363 deletions

View File

@@ -8,15 +8,16 @@ softmax_layer *make_softmax_layer(int inputs)
fprintf(stderr, "Softmax Layer: %d inputs\n", inputs);
softmax_layer *layer = calloc(1, sizeof(softmax_layer));
layer->inputs = inputs;
layer->output = calloc(inputs, sizeof(double));
layer->delta = calloc(inputs, sizeof(double));
layer->output = calloc(inputs, sizeof(float));
layer->delta = calloc(inputs, sizeof(float));
return layer;
}
void forward_softmax_layer(const softmax_layer layer, double *input)
/* UNSTABLE!
void forward_softmax_layer(const softmax_layer layer, float *input)
{
int i;
double sum = 0;
float sum = 0;
for(i = 0; i < layer.inputs; ++i){
sum += exp(input[i]);
}
@@ -24,8 +25,25 @@ void forward_softmax_layer(const softmax_layer layer, double *input)
layer.output[i] = exp(input[i])/sum;
}
}
*/
/*
 * Forward pass for a softmax layer using the numerically stable
 * log-sum-exp formulation: output[i] = exp(input[i] - logsumexp(input)).
 *
 * layer: softmax layer whose output[] buffer (layer.inputs floats) is written.
 * input: activations from the previous layer; layer.inputs floats, read only.
 */
void forward_softmax_layer(const softmax_layer layer, float *input)
{
    int i;
    float sum = 0;
    if(layer.inputs <= 0) return;   /* nothing to normalize */
    /* Shift by the true maximum so exp() cannot overflow. Seeding from
       input[0] (rather than 0) keeps the shift correct when every input is
       negative; with a 0 seed, large-magnitude negative inputs make every
       exp(input[i]) underflow to 0 and log(sum) become -inf. */
    float largest = input[0];
    for(i = 1; i < layer.inputs; ++i){
        if(input[i] > largest) largest = input[i];
    }
    for(i = 0; i < layer.inputs; ++i){
        sum += exp(input[i]-largest);
    }
    sum = largest+log(sum);         /* sum now holds logsumexp(input) */
    for(i = 0; i < layer.inputs; ++i){
        layer.output[i] = exp(input[i]-sum);
    }
}
void backward_softmax_layer(const softmax_layer layer, double *input, double *delta)
void backward_softmax_layer(const softmax_layer layer, float *input, float *delta)
{
int i;
for(i = 0; i < layer.inputs; ++i){