Try to fuse conv_xnor+shortcut -> conv_xnor

This commit is contained in:
AlexeyAB
2019-02-12 02:05:15 +03:00
parent 9e138adf09
commit 5448e07445
5 changed files with 51 additions and 18 deletions

View File

@ -975,7 +975,7 @@ void fuse_conv_batchnorm(network net)
}
}
// No-op forward pass. Installed as forward_gpu on layers that have been
// fused into a preceding convolution (see the SHORTCUT fusion in
// calculate_binary_weights), so the fused-out layer is skipped at inference.
void forward_blank_layer(layer l, network_state state) {}
void calculate_binary_weights(network net)
{
@ -996,6 +996,19 @@ void calculate_binary_weights(network net)
if (net.layers[j].use_bin_output) {
l->activation = LINEAR;
}
// fuse conv_xnor + shortcut -> conv_xnor
// If the layer immediately following this convolution is an element-wise
// SHORTCUT whose input and output dimensions are identical (so the addition
// is a pure pointwise sum with no resampling), fold the shortcut into the
// convolution and neutralize the shortcut layer.
// NOTE(review): the condition does not check that the conv is actually an
// XNOR/binary conv (e.g. l->xnor or use_bin_output) — presumably guaranteed
// by surrounding code elided from this view; confirm before relying on it.
if ((j + 1) < net.n && net.layers[j].type == CONVOLUTIONAL) {
layer *sc = &net.layers[j + 1];
if (sc->type == SHORTCUT && sc->w == sc->out_w && sc->h == sc->out_h && sc->c == sc->out_c)
{
// GPU buffer of the shortcut's other input (the layer indexed by
// sc->index), which the fused conv must add to its own output.
l->bin_conv_shortcut_in_gpu = net.layers[net.layers[j + 1].index].output_gpu;
// GPU buffer where the fused result must land — the shortcut layer's
// original output, so downstream layers see no difference.
l->bin_conv_shortcut_out_gpu = net.layers[j + 1].output_gpu;
// Disable the now-redundant shortcut: mark it BLANK and give it a
// no-op forward so the runtime skips it entirely.
net.layers[j + 1].type = BLANK;
net.layers[j + 1].forward_gpu = forward_blank_layer;
}
}
}
}
}