#include "connected_layer.h"
#include "utils.h"
#include "mini_blas.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
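
/*
 * The connected_layer struct itself is defined in connected_layer.h. A sketch
 * of the fields this file relies on (inferred from the code below, not the
 * authoritative definition):
 *
 *     typedef struct {
 *         int batch, inputs, outputs;
 *         float dropout;
 *         ACTIVATION activation;
 *         float *output, *delta;                  // batch*outputs each
 *         float *weights, *weight_updates,        // inputs*outputs each
 *               *weight_adapt, *weight_momentum;
 *         float *biases, *bias_updates,           // outputs each
 *               *bias_adapt, *bias_momentum;
 *     } connected_layer;
 */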

/* Build a fully connected layer: output = activation(input*weights + biases). */
connected_layer *make_connected_layer(int batch, int inputs, int outputs, float dropout, ACTIVATION activation)
{
    fprintf(stderr, "Connected Layer: %d inputs, %d outputs\n", inputs, outputs);
    int i;
    connected_layer *layer = calloc(1, sizeof(connected_layer));
    layer->inputs = inputs;
    layer->outputs = outputs;
    layer->batch = batch;
    layer->dropout = dropout;

    /* Per-example activations and error terms (batch rows of outputs floats each). */
    layer->output = calloc(batch*outputs, sizeof(float));
    layer->delta = calloc(batch*outputs, sizeof(float));

    layer->weight_updates = calloc(inputs*outputs, sizeof(float));
    layer->weight_adapt = calloc(inputs*outputs, sizeof(float));
    layer->weight_momentum = calloc(inputs*outputs, sizeof(float));
    layer->weights = calloc(inputs*outputs, sizeof(float));
    /* Small uniform weights, scaled by fan-in. */
    float scale = 1./inputs;
    for(i = 0; i < inputs*outputs; ++i){
        layer->weights[i] = scale*rand_uniform();
    }

    layer->bias_updates = calloc(outputs, sizeof(float));
    layer->bias_adapt = calloc(outputs, sizeof(float));
    layer->bias_momentum = calloc(outputs, sizeof(float));
    layer->biases = calloc(outputs, sizeof(float));
    for(i = 0; i < outputs; ++i){
        //layer->biases[i] = rand_normal()*scale + scale;
        layer->biases[i] = 1;
    }

    layer->activation = activation;
    return layer;
}
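
/*
 * Example: a 784 -> 100 fully connected layer for a batch of 128, no dropout
 * (a sketch; RELU here stands in for whichever ACTIVATION value the caller
 * actually uses):
 *
 *     connected_layer *l = make_connected_layer(128, 784, 100, 0, RELU);
 */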

void update_connected_layer(connected_layer layer, float step, float momentum, float decay)
{
    int i;
    for(i = 0; i < layer.outputs; ++i){
        layer.bias_momentum[i] = step*(layer.bias_updates[i]) + momentum*layer.bias_momentum[i];
        layer.biases[i] += layer.bias_momentum[i];
    }
    for(i = 0; i < layer.outputs*layer.inputs; ++i){
        layer.weight_momentum[i] = step*(layer.weight_updates[i] - decay*layer.weights[i]) + momentum*layer.weight_momentum[i];
        layer.weights[i] += layer.weight_momentum[i];
    }
    /* Clear accumulated updates for the next batch. */
    memset(layer.bias_updates, 0, layer.outputs*sizeof(float));
    memset(layer.weight_updates, 0, layer.outputs*layer.inputs*sizeof(float));
}
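
/*
 * SGD with momentum and L2 weight decay, mirroring the loops above:
 *
 *     v_w <- step*(dW - decay*W) + momentum*v_w;    W <- W + v_w
 *     v_b <- step*db             + momentum*v_b;    b <- b + v_b
 *
 * where dW, db are the updates accumulated during backward. Biases are not
 * decayed, and the *_adapt buffers allocated in make_connected_layer are not
 * used on this path.
 */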

void forward_connected_layer(connected_layer layer, float *input, int train)
{
    int i;
    /* Dropout only applies while training; layer is passed by value, so
     * zeroing the local copy's rate leaves the stored layer untouched. */
    if(!train) layer.dropout = 0;
    /* Seed each output row with the biases, then accumulate input*weights. */
    for(i = 0; i < layer.batch; ++i){
        memcpy(layer.output + i*layer.outputs, layer.biases, layer.outputs*sizeof(float));
    }
    int m = layer.batch;
    int k = layer.inputs;
    int n = layer.outputs;
    float *a = input;           /* batch x inputs   */
    float *b = layer.weights;   /* inputs x outputs */
    float *c = layer.output;    /* batch x outputs  */
    gemm(0,0,m,n,k,1,a,k,b,n,1,c,n);
    activate_array(layer.output, layer.outputs*layer.batch, layer.activation, layer.dropout);
}
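
/*
 * Backward pass math, for output = f(x*W + b). On entry layer.delta holds the
 * error term for the outputs; the first loop multiplies it by f'. Then:
 *
 *     db    += column sums of delta         (one per output)
 *     dW    += x^T * delta                  (inputs x outputs)
 *     dL/dx  = delta * W^T                  (batch x inputs, into *delta arg)
 */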

void backward_connected_layer(connected_layer layer, float *input, float *delta)
{
    int i;
    /* Push the error through the activation and accumulate bias updates;
     * i % layer.outputs folds the batch dimension onto one bias per output. */
    for(i = 0; i < layer.outputs*layer.batch; ++i){
        layer.delta[i] *= gradient(layer.output[i], layer.activation);
        layer.bias_updates[i%layer.outputs] += layer.delta[i];
    }

    /* Weight updates: input^T (inputs x batch) * delta (batch x outputs). */
    int m = layer.inputs;
    int k = layer.batch;
    int n = layer.outputs;
    float *a = input;
    float *b = layer.delta;
    float *c = layer.weight_updates;
    gemm(1,0,m,n,k,1,a,m,b,n,1,c,n);

    /* Error for the previous layer: delta (batch x outputs) * weights^T. */
    m = layer.batch;
    k = layer.outputs;
    n = layer.inputs;

    a = layer.delta;
    b = layer.weights;
    c = delta;

    /* The first layer has no previous delta to fill. */
    if(c) gemm(0,1,m,n,k,1,a,k,b,k,0,c,n);
}
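
/*
 * Usage sketch (not part of the library): one training step on a single
 * layer, compiled only with -DCONNECTED_LAYER_EXAMPLE. The drive-outputs-to-
 * zero error term and the RELU activation are stand-ins for the example.
 */
#ifdef CONNECTED_LAYER_EXAMPLE
int main()
{
    int batch = 4, inputs = 8, outputs = 2;
    connected_layer *layer = make_connected_layer(batch, inputs, outputs, 0, RELU);

    float *input = calloc(batch*inputs, sizeof(float));
    float *prev_delta = calloc(batch*inputs, sizeof(float));
    int i;
    for(i = 0; i < batch*inputs; ++i) input[i] = (float)rand()/RAND_MAX;

    forward_connected_layer(*layer, input, 1);
    printf("first output: %f\n", layer->output[0]);

    /* Pretend the error term drives every output toward zero. */
    for(i = 0; i < batch*outputs; ++i) layer->delta[i] = -layer->output[i];

    backward_connected_layer(*layer, input, prev_delta);
    update_connected_layer(*layer, .01, .9, .0001);

    free(input);
    free(prev_delta);
    return 0;
}
#endif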