lstm.train.cfg
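# Darknet training configuration for a stacked-LSTM sequence model.
# inputs=256 suggests one-hot byte/character inputs, likely for the `darknet rnn` character-model demo.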
[net]
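# With batch=128 and subdivisions=8, each iteration should process 128/8 = 16 sequences,
# each unrolled for time_steps=576 steps (truncated backpropagation through time).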
subdivisions=8
inputs=256
batch = 128
momentum=0.9
decay=0.001
max_batches = 2000
time_steps=576
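# Learning-rate schedule: start at 0.5, multiply by 0.1 at batches 1000 and 1500 (policy=steps);
# burn_in ramps the rate up over the first 10 batches, and training stops at max_batches=2000.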
learning_rate=0.5
policy=steps
burn_in=10
steps=1000,1500
scales=.1,.1
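
# Three stacked LSTM layers, each with 1024 hidden units and batch normalization.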
[lstm]
batch_normalize=1
output = 1024

[lstm]
batch_normalize=1
output = 1024

[lstm]
batch_normalize=1
output = 1024
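
# Output head: a fully connected layer maps the LSTM state back to 256 outputs,
# followed by softmax; the [cost] layer with type=sse sets the training loss.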
[connected]
output=256
activation=leaky

[softmax]

[cost]
type=sse