From c9e201f86adb03a8538f4605acea0fc2546df79e Mon Sep 17 00:00:00 2001
From: Jake Lee
Date: Wed, 20 Mar 2019 15:07:14 +0800
Subject: [PATCH] Fixes the test_sgld (#14473)

* fix the test_sgld

* retrigger CI
---
 tests/python/unittest/test_optimizer.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/tests/python/unittest/test_optimizer.py b/tests/python/unittest/test_optimizer.py
index 68aa5f8e4d8e..d5aabcb4b1e5 100644
--- a/tests/python/unittest/test_optimizer.py
+++ b/tests/python/unittest/test_optimizer.py
@@ -461,7 +461,6 @@ def update(self, index, weight, grad, state):
 
 
 @with_seed()
-@unittest.skip("test fails intermittently. temporarily disabled till it gets fixed. tracked at /~https://github.com/apache/incubator-mxnet/issues/14241")
 def test_sgld():
     opt1 = PySGLD
     opt2 = mx.optimizer.SGLD
@@ -518,7 +517,9 @@ def compare_optimizer_noise_seeded(opt1, opt2, shape, dtype, noise_seed,
                 if (dtype == np.float16 and ('multi_precision' not in kwarg or
                     not kwarg['multi_precision'])):
                     continue
-                compare_optimizer_noise_seeded(opt1(**kwarg), opt2(**kwarg), shape, dtype, seed)
+                atol = 1e-2 if dtype == np.float16 else 1e-3
+                rtol = 1e-4 if dtype == np.float16 else 1e-5
+                compare_optimizer_noise_seeded(opt1(**kwarg), opt2(**kwarg), shape, dtype, seed, atol=atol, rtol=rtol)