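# Darknet recurrent network configuration (appears to be the character-level
# RNN example, cfg/rnn.cfg, from the Darknet repository).
#
# [net] holds the global training hyperparameters: one sample per batch with a
# single time step, a 256-wide input vector (presumably one slot per byte value
# in the character-RNN demo), SGD with momentum 0.9 and weight decay 0.001, and
# a step learning-rate policy that scales the initial rate of 0.1 by 0.1 at
# iterations 1000 and 1500, stopping after max_batches = 2000.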
[net]
subdivisions=1
inputs=256
batch = 1
momentum=0.9
decay=0.001
max_batches = 2000
time_steps=1
learning_rate=0.1
policy=steps
steps=1000,1500
scales=.1,.1
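
# Three stacked [rnn] layers, each with 1024 hidden units and 1024 outputs,
# batch normalization enabled, and leaky-ReLU activation.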
[rnn]
batch_normalize=1
output = 1024
hidden=1024
activation=leaky

[rnn]
batch_normalize=1
output = 1024
hidden=1024
activation=leaky

[rnn]
batch_normalize=1
output = 1024
hidden=1024
activation=leaky
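
# Fully connected layer mapping the last RNN output down to 256 values
# (matching the 256-wide input; presumably one score per symbol).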
[connected]
output=256
activation=leaky
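
# Softmax turns the 256 connected-layer outputs into a probability
# distribution over the next symbol.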
[softmax]
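
# Usage sketch (an assumption based on the Darknet RNN example; exact commands
# and flags may differ):
#   ./darknet rnn train cfg/rnn.cfg -file data/shakespeare.txt
#   ./darknet rnn generate cfg/rnn.cfg shakespeare.weights -len 500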