fix make_dropout_layer

If you want to resize the dropout layer, the realloc should use inputs*l->batch*sizeof(float), i.e. the new input count passed to resize_dropout_layer rather than the stale l->inputs.
DoctorKey 2018-04-11 09:02:20 +08:00
parent 508381b37f
commit d1622c9bba


@@ -27,7 +27,7 @@ dropout_layer make_dropout_layer(int batch, int inputs, float probability)
 void resize_dropout_layer(dropout_layer *l, int inputs)
 {
-    l->rand = realloc(l->rand, l->inputs*l->batch*sizeof(float));
+    l->rand = realloc(l->rand, inputs*l->batch*sizeof(float));
 #ifdef GPU
     cuda_free(l->rand_gpu);
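
For context, a sketch of what the whole function looks like after this fix. The hunk above cuts off after cuda_free; the re-creation of rand_gpu via cuda_make_array is assumed from the surrounding darknet source and is not part of this diff.

// src/dropout_layer.c (sketch after the fix)
void resize_dropout_layer(dropout_layer *l, int inputs)
{
    // Size the CPU buffer from the new input count, not the stale l->inputs,
    // so the buffer matches the resized layer.
    l->rand = realloc(l->rand, inputs*l->batch*sizeof(float));
#ifdef GPU
    cuda_free(l->rand_gpu);
    // Assumed from surrounding darknet code: rebuild the GPU buffer at the new size.
    l->rand_gpu = cuda_make_array(l->rand, inputs*l->batch);
#endif
}

The bug matters when the network is resized to a larger input: realloc sized by the old l->inputs leaves the buffer too small, so later writes of inputs*l->batch floats would run past the allocation.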