[net]
subdivisions=1
inputs=256
batch = 1
time_steps = 1
momentum=0.9
decay=0.0005
max_batches = 50000000
learning_rate=0.1

[rnn]
batch_normalize=1
output = 256
hidden=512
activation=leaky

[rnn]
batch_normalize=1
output = 256
hidden=512
activation=leaky

[rnn]
batch_normalize=1
output = 256
hidden=512
activation=leaky

[connected]
output=256
activation=leaky

[softmax]

[cost]
type=sse
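
To make the architecture in this config concrete, here is a minimal NumPy sketch of the forward pass it describes: a one-hot byte input (inputs=256) fed through three stacked recurrent layers (hidden=512, output=256, leaky activations), a connected layer of 256 units, and a softmax over the 256 byte values. This is an illustrative approximation, not Darknet's implementation: batch_normalize, the SSE cost, training, and the exact internal wiring of Darknet's [rnn] layer are omitted, and the names RNNLayer, leaky, and forward are introduced here for the sketch.

import numpy as np

INPUTS, HIDDEN, OUTPUT = 256, 512, 256  # values taken from the config above

def leaky(x, alpha=0.1):
    # Leaky ReLU, matching Darknet's 'leaky' activation (slope 0.1 for x < 0)
    return np.where(x > 0, x, alpha * x)

def softmax(x):
    e = np.exp(x - x.max())
    return e / e.sum()

class RNNLayer:
    # One recurrent layer: input -> hidden state -> output, leaky activations
    def __init__(self, n_in, n_hidden, n_out, rng, scale=0.01):
        self.Wxh = rng.standard_normal((n_hidden, n_in)) * scale
        self.Whh = rng.standard_normal((n_hidden, n_hidden)) * scale
        self.Who = rng.standard_normal((n_out, n_hidden)) * scale
        self.h = np.zeros(n_hidden)

    def step(self, x):
        self.h = leaky(self.Wxh @ x + self.Whh @ self.h)
        return leaky(self.Who @ self.h)

rng = np.random.default_rng(0)
layers = [RNNLayer(INPUTS, HIDDEN, OUTPUT, rng) for _ in range(3)]  # three [rnn] layers
Wfc = rng.standard_normal((OUTPUT, OUTPUT)) * 0.01                  # [connected] output=256

def forward(byte_value):
    # One time step: one-hot byte in, probability distribution over bytes out
    x = np.zeros(INPUTS)
    x[byte_value] = 1.0
    for layer in layers:
        x = layer.step(x)
    return softmax(leaky(Wfc @ x))  # [connected] + [softmax]

probs = forward(ord('a'))
print(probs.shape, probs.sum())  # (256,) 1.0

Because the recurrent layers keep their hidden state between calls, repeatedly calling forward with successive bytes of a text models the sequence one character at a time, which is how this character-level configuration is meant to be used.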