#include "softmax_layer.h"
#include "blas.h"
#include "cuda.h"
#include <float.h>
#include <math.h>
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
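
// Allocate and initialize a softmax layer. `inputs` must divide evenly
// by `groups`; each group is normalized independently in the forward pass.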
softmax_layer *make_softmax_layer(int batch, int groups, int inputs)
{
assert(inputs%groups == 0);
fprintf(stderr, "Softmax Layer: %d inputs\n", inputs);
softmax_layer *layer = calloc(1, sizeof(softmax_layer));
layer->batch = batch;
layer->groups = groups;
layer->inputs = inputs;
layer->output = calloc(inputs*batch, sizeof(float));
layer->delta = calloc(inputs*batch, sizeof(float));
#ifdef GPU
layer->output_gpu = cuda_make_array(layer->output, inputs*batch);
layer->delta_gpu = cuda_make_array(layer->delta, inputs*batch);
#endif
return layer;
}
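
// Numerically stable softmax over n values, computed via the log-sum-exp
// trick so that exp() never overflows.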
void softmax_array(float *input, int n, float *output)
{
int i;
float sum = 0;
float largest = -FLT_MAX;
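    // Find the largest input; shifting every exponent by it bounds exp() at 1.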
for(i = 0; i < n; ++i){
if(input[i] > largest) largest = input[i];
}
for(i = 0; i < n; ++i){
sum += exp(input[i]-largest);
}
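    // `sum` holds sum(exp(x - largest)); fold the shift back in to get the
    // log-sum-exp, log(sum(exp(x))). If every term underflowed to zero, fall
    // back to largest-100 so the subtraction below stays finite.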
if(sum) sum = largest+log(sum);
else sum = largest-100;
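    // exp(x - logsumexp) == exp(x) / sum(exp(x)): a normalized probability.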
for(i = 0; i < n; ++i){
output[i] = exp(input[i]-sum);
}
}
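
// Forward pass: run softmax over each group of each batch item. With
// groups > 1, every input row is treated as `groups` independent
// distributions of inputs/groups values each.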
void forward_softmax_layer(const softmax_layer layer, float *input)
{
int b;
int inputs = layer.inputs / layer.groups;
int batch = layer.batch * layer.groups;
for(b = 0; b < batch; ++b){
softmax_array(input+b*inputs, inputs, layer.output+b*inputs);
}
}
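
// Backward pass: the gradient with respect to this layer's output has
// already been written into layer.delta (e.g. by the cost computation);
// copy it through unchanged to the previous layer's delta.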
void backward_softmax_layer(const softmax_layer layer, float *delta)
{
int i;
for(i = 0; i < layer.inputs*layer.batch; ++i){
delta[i] = layer.delta[i];
}
}
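
/* Usage sketch (illustrative only; `net_input` and `prev_delta` are
   hypothetical buffers owned by the surrounding network code):

       softmax_layer *l = make_softmax_layer(batch, 1, n);
       forward_softmax_layer(*l, net_input);    // fills l->output
       // ... write the loss gradient into l->delta ...
       backward_softmax_layer(*l, prev_delta);  // copies l->delta out
*/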