Add optim option to copy_task

ixaxaar 2017-11-12 14:05:53 +05:30
parent 70b3388051
commit 256b323cdc
2 changed files with 7 additions and 4 deletions


@@ -78,7 +78,7 @@ The copy task, as described in the original paper, is included in the repo.
 From the project root:
 ```bash
-python ./tasks/copy_task.py -cuda 0
+python ./tasks/copy_task.py -cuda 0 -optim rmsprop -batch_size 100 -mem_slot 64
 ```
 The copy task can be used to debug memory using [Visdom](https://github.com/facebookresearch/visdom).
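As a usage note (not part of the commit): `-optim` defaults to `adam`, so an explicit invocation combining it with the existing `-lr` flag would look like the following; the value shown is simply the script's new default.

```bash
# Hedged example: -optim defaults to adam; 1e-4 is the new -lr default (see the diff below).
python ./tasks/copy_task.py -cuda 0 -optim adam -lr 1e-4
```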


@@ -31,7 +31,8 @@ parser.add_argument('-dropout', type=float, default=0, help='controller dropout')
 parser.add_argument('-nlayer', type=int, default=2, help='number of layers')
 parser.add_argument('-nhlayer', type=int, default=2, help='number of hidden layers')
-parser.add_argument('-lr', type=float, default=1e-2, help='initial learning rate')
+parser.add_argument('-lr', type=float, default=1e-4, help='initial learning rate')
+parser.add_argument('-optim', type=str, default='adam', help='learning rule, supports adam|rmsprop')
 parser.add_argument('-clip', type=float, default=50, help='gradient clipping')
 parser.add_argument('-batch_size', type=int, default=100, metavar='N', help='batch size')
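For illustration (not part of the commit), a self-contained sketch of how the two added argparse lines behave; the simulated command line is hypothetical.

```python
# Standalone sketch mirroring the two options added above.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-lr', type=float, default=1e-4, help='initial learning rate')
parser.add_argument('-optim', type=str, default='adam', help='learning rule, supports adam|rmsprop')

# Simulate `copy_task.py -optim rmsprop`; -lr falls back to its new default.
args = parser.parse_args(['-optim', 'rmsprop'])
assert args.optim == 'rmsprop' and args.lr == 1e-4
```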
@@ -129,8 +130,10 @@ if __name__ == '__main__':
   last_save_losses = []
-  optimizer = optim.Adam(rnn.parameters(), lr=args.lr, eps=1e-9, betas=[0.9, 0.98])
-  # optimizer = optim.RMSprop(rnn.parameters(), lr=args.lr, eps=1e-10)
+  if args.optim == 'adam':
+    optimizer = optim.Adam(rnn.parameters(), lr=args.lr, eps=1e-9, betas=[0.9, 0.98])
+  elif args.optim == 'rmsprop':
+    optimizer = optim.RMSprop(rnn.parameters(), lr=args.lr, eps=1e-10)
 for epoch in range(iterations + 1):
   llprint("\rIteration {ep}/{tot}".format(ep=epoch, tot=iterations))
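One edge the new branch leaves open: an unrecognized `-optim` value assigns nothing, so `optimizer` surfaces later as a `NameError`. A minimal standalone sketch of the same dispatch that fails loudly instead; the toy `nn.Linear` model and the `ValueError` are illustrative assumptions, not part of the commit:

```python
import torch.nn as nn
import torch.optim as optim

model = nn.Linear(8, 8)  # stand-in for the DNC model; not from the commit

def make_optimizer(name, params, lr):
  # Same adam|rmsprop choice and hyperparameters as the diff above.
  if name == 'adam':
    return optim.Adam(params, lr=lr, eps=1e-9, betas=[0.9, 0.98])
  elif name == 'rmsprop':
    return optim.RMSprop(params, lr=lr, eps=1e-10)
  # Assumption: raise on unknown names rather than leave `optimizer` unbound.
  raise ValueError('unsupported -optim value: {}'.format(name))

optimizer = make_optimizer('rmsprop', model.parameters(), lr=1e-4)
```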