    [net]
    subdivisions=1
    inputs=256
    batch = 128
    momentum=0.9
    decay=0.001
    max_batches = 2000
    time_steps=576
    learning_rate=0.1
    policy=steps
    steps=1000,1500
    scales=.1,.1

    [rnn]
    batch_normalize=1
    output = 1024
    hidden=1024
    activation=leaky

    [rnn]
    batch_normalize=1
    output = 1024
    hidden=1024
    activation=leaky

    [rnn]
    batch_normalize=1
    output = 1024
    hidden=1024
    activation=leaky

    [connected]
    output=256
    activation=leaky

    [softmax]
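For readers more used to mainstream frameworks, below is a rough PyTorch sketch of the network this config describes: three stacked recurrent layers with 1024 hidden units each, feeding a 256-way fully connected layer and a softmax. It is an approximation, not Darknet's implementation: Darknet's `[rnn]` sections use a leaky activation and batch normalization internally, whereas `nn.RNN` here uses its default tanh and no normalization, and the names (`CharRNN`, `VOCAB_SIZE`, `HIDDEN`) are invented for illustration.

```python
# Rough, approximate PyTorch equivalent of the Darknet config above.
# Hyperparameter values are read off the config; everything else is illustrative.
import torch
import torch.nn as nn

VOCAB_SIZE = 256    # inputs=256 and [connected] output=256 in the config
HIDDEN = 1024       # hidden=1024 in each [rnn] section
NUM_RNN_LAYERS = 3  # three stacked [rnn] sections

class CharRNN(nn.Module):
    def __init__(self):
        super().__init__()
        # Three stacked vanilla RNN layers (Darknet uses leaky activation +
        # batch norm inside its [rnn] layers; nn.RNN does not replicate that).
        self.rnn = nn.RNN(input_size=VOCAB_SIZE, hidden_size=HIDDEN,
                          num_layers=NUM_RNN_LAYERS, batch_first=True)
        # [connected] output=256, followed in the config by [softmax].
        self.fc = nn.Linear(HIDDEN, VOCAB_SIZE)

    def forward(self, x, h=None):
        out, h = self.rnn(x, h)   # (batch, time, HIDDEN)
        logits = self.fc(out)     # (batch, time, VOCAB_SIZE)
        return logits, h          # softmax is folded into the loss during training

# Example shapes matching the [net] section: batch=128, time_steps=576.
x = torch.zeros(128, 576, VOCAB_SIZE)
model = CharRNN()
logits, h = model(x)
```

The 256-unit input and output sizes suggest the network operates on one-hot encoded bytes, with the final softmax giving a distribution over the next byte; the sketch therefore returns raw logits, to be paired with a cross-entropy loss during training.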