fix make_dropout_layer

If you want to resize the dropout layer, you need to realloc its buffer to inputs*l->batch*sizeof(float) bytes.
This commit is contained in:
DoctorKey 2018-04-11 10:45:57 +08:00
parent d1622c9bba
commit f61f3171d9

View File

@ -27,6 +27,8 @@ dropout_layer make_dropout_layer(int batch, int inputs, float probability)
 void resize_dropout_layer(dropout_layer *l, int inputs)
 {
+    l->inputs = inputs;
+    l->outputs = inputs;
     l->rand = realloc(l->rand, inputs*l->batch*sizeof(float));
 #ifdef GPU
     cuda_free(l->rand_gpu);