
Commit 5b9251b
Removed fixed seed and increased learning rate and tolerance for test_nadam (#12164)
access2rohit authored and sandeep-krishnamurthy committed Aug 15, 2018
1 parent b675d69 commit 5b9251b
Showing 1 changed file with 3 additions and 3 deletions.
tests/python/unittest/test_optimizer.py (6 changes: 3 additions & 3 deletions)
@@ -943,7 +943,7 @@ def test_ftrl():
         compare_optimizer(opt1(lazy_update=True, **kwarg), opt2(**kwarg), shape,
                           np.float32, w_stype='row_sparse', g_stype='row_sparse')
 
-@with_seed(1234)
+@with_seed()
 def test_nadam():
 
     def get_net(num_hidden, flatten=True):
@@ -965,10 +965,10 @@ def get_net(num_hidden, flatten=True):
     loss = Loss(output, l)
     loss = mx.sym.make_loss(loss)
     mod = mx.mod.Module(loss, data_names=('data',), label_names=('label',))
-    mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.0005, 'wd': 0.0005},
+    mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.001, 'wd': 0.0005},
             initializer=mx.init.Xavier(magnitude=2), eval_metric=mx.metric.Loss(),
             optimizer='nadam')
-    assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.1
+    assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.11
 
 # AdaGrad
 class PyAdaGrad(mx.optimizer.Optimizer):
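Context for the first hunk: the test is decorated with @with_seed(), the decorator MXNet's unit tests use to seed the random number generators. With a fixed argument such as 1234 the test always sees the same random data; with no argument the seed varies from run to run, which is presumably why the learning rate and tolerance were loosened in the second hunk. Below is a minimal, hypothetical sketch of the idea behind such a decorator, assuming it simply seeds the Python, NumPy, and MXNet RNGs; it is not the actual with_seed implementation from the test utilities, and with_seed_sketch is an invented name.

import functools
import random

import mxnet as mx
import numpy as np

def with_seed_sketch(seed=None):
    """Hypothetical stand-in for @with_seed(); illustration only."""
    def decorator(test_fn):
        @functools.wraps(test_fn)
        def wrapper(*args, **kwargs):
            # Use the pinned seed if one was given, otherwise draw a fresh one
            # so every run exercises a different random initialization.
            used_seed = seed if seed is not None else random.randint(0, 2**31 - 1)
            random.seed(used_seed)
            np.random.seed(used_seed)
            mx.random.seed(used_seed)
            print('running %s with seed %d' % (test_fn.__name__, used_seed))
            return test_fn(*args, **kwargs)
        return wrapper
    return decorator

Reporting the seed keeps a failing run reproducible even though the seed is no longer fixed.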

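Context for the second hunk: the test trains with optimizer='nadam' and asserts the final loss, so the numbers that changed are the optimizer's learning rate (0.0005 to 0.001) and the pass threshold on that loss (0.1 to 0.11). As a rough reference for what a Nadam (Nesterov-accelerated Adam) update does, here is a simplified single-step sketch in NumPy, loosely following Dozat (2016); it is not MXNet's exact Nadam implementation (which also applies a momentum schedule), and the function name, defaults, and state handling are assumptions for illustration.

import numpy as np

def nadam_step(weight, grad, m, v, t, lr=0.001, beta1=0.9, beta2=0.999,
               eps=1e-8, wd=0.0005):
    """Simplified Nadam update for one parameter array; illustration only."""
    # Fold weight decay into the gradient, as the 'wd' optimizer_param does.
    grad = grad + wd * weight
    # Adam-style first and second moment estimates.
    m = beta1 * m + (1.0 - beta1) * grad
    v = beta2 * v + (1.0 - beta2) * grad * grad
    # Bias-corrected estimates.
    m_hat = m / (1.0 - beta1 ** t)
    v_hat = v / (1.0 - beta2 ** t)
    # Nesterov look-ahead: blend the corrected momentum with the current gradient.
    m_bar = beta1 * m_hat + (1.0 - beta1) * grad / (1.0 - beta1 ** t)
    weight = weight - lr * m_bar / (np.sqrt(v_hat) + eps)
    return weight, m, v

With the seed no longer fixed, the slightly larger learning rate and the looser 0.11 bound presumably give the 60-epoch training loop enough headroom to converge across different random draws.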