
Commit 6be19d2

Moved tests out of run_in_graph_and_eager_mode in novograd (#1605)
Parent: 7c432f3


tensorflow_addons/optimizers/tests/novograd_test.py

Lines changed: 48 additions & 45 deletions
@@ -15,35 +15,58 @@
 """Tests for NovoGrad Optimizer."""
 
 import numpy as np
+import pytest
 import tensorflow as tf
 
 from tensorflow_addons.optimizers import NovoGrad
 from tensorflow_addons.utils import test_utils
 
 
-@test_utils.run_all_in_graph_and_eager_modes
-class NovoGradTest(tf.test.TestCase):
-    def run_dense_sample(self, iterations, expected, optimizer):
-        var_0 = tf.Variable([1.0, 2.0], dtype=tf.dtypes.float32)
-        var_1 = tf.Variable([3.0, 4.0], dtype=tf.dtypes.float32)
+def run_dense_sample(iterations, expected, optimizer):
+    var_0 = tf.Variable([1.0, 2.0], dtype=tf.dtypes.float32)
+    var_1 = tf.Variable([3.0, 4.0], dtype=tf.dtypes.float32)
 
-        grad_0 = tf.constant([0.1, 0.2], dtype=tf.dtypes.float32)
-        grad_1 = tf.constant([0.3, 0.4], dtype=tf.dtypes.float32)
+    grad_0 = tf.constant([0.1, 0.2], dtype=tf.dtypes.float32)
+    grad_1 = tf.constant([0.3, 0.4], dtype=tf.dtypes.float32)
 
-        grads_and_vars = list(zip([grad_0, grad_1], [var_0, var_1]))
+    grads_and_vars = list(zip([grad_0, grad_1], [var_0, var_1]))
 
-        if tf.executing_eagerly():
-            for _ in range(iterations):
-                optimizer.apply_gradients(grads_and_vars)
-        else:
-            update = optimizer.apply_gradients(grads_and_vars)
-            self.evaluate(tf.compat.v1.global_variables_initializer())
-            for _ in range(iterations):
-                self.evaluate(update)
+    for _ in range(iterations):
+        optimizer.apply_gradients(grads_and_vars)
 
-        self.assertAllClose(var_0.read_value(), expected[0], atol=2e-4)
-        self.assertAllClose(var_1.read_value(), expected[1], atol=2e-4)
+    np.testing.assert_allclose(var_0.read_value(), expected[0], atol=2e-4)
+    np.testing.assert_allclose(var_1.read_value(), expected[1], atol=2e-4)
+
+
+@pytest.mark.usefixtures("maybe_run_functions_eagerly")
+def test_dense_sample():
+    run_dense_sample(
+        iterations=1,
+        expected=[[0.9552786425, 1.9105572849], [2.9400000012, 3.9200000016]],
+        optimizer=NovoGrad(lr=0.1, epsilon=1e-8),
+    )
+
+
+@pytest.mark.usefixtures("maybe_run_functions_eagerly")
+def test_dense_sample_with_weight_decay():
+    run_dense_sample(
+        iterations=1,
+        expected=[[0.945278642, 1.8905572849], [2.9100000012, 3.8800000016]],
+        optimizer=NovoGrad(lr=0.1, weight_decay=0.1, epsilon=1e-8),
+    )
+
+
+@pytest.mark.usefixtures("maybe_run_functions_eagerly")
+def test_dense_sample_with_grad_averaging():
+    run_dense_sample(
+        iterations=2,
+        expected=[[0.9105572849, 1.8211145698], [2.8800000024, 3.8400000032]],
+        optimizer=NovoGrad(lr=0.1, grad_averaging=True, epsilon=1e-8),
+    )
 
+
+@test_utils.run_all_in_graph_and_eager_modes
+class NovoGradTest(tf.test.TestCase):
     def run_sparse_sample(self, iterations, expected, optimizer):
         var_0 = tf.Variable([1.0, 2.0])
         var_1 = tf.Variable([3.0, 4.0])
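Note: the relocated tests opt in to the shared maybe_run_functions_eagerly pytest fixture rather than the run_all_in_graph_and_eager_modes TestCase decorator. The fixture itself is defined elsewhere in the tensorflow_addons test suite; a minimal sketch of what such a fixture could look like (the project's actual implementation may differ in detail):

    # Hypothetical sketch, not part of this diff: a parametrized fixture that
    # runs each test twice, once with tf.functions executed eagerly and once
    # with normal graph compilation.
    import pytest
    import tensorflow as tf

    @pytest.fixture(params=["eager", "tf_function"])
    def maybe_run_functions_eagerly(request):
        tf.config.experimental_run_functions_eagerly(request.param == "eager")
        yield
        # Restore the default so later tests see compiled tf.functions again.
        tf.config.experimental_run_functions_eagerly(False)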
@@ -69,41 +92,20 @@ def run_sparse_sample(self, iterations, expected, optimizer):
         self.assertAllClose(var_0.read_value(), expected[0], atol=2e-4)
         self.assertAllClose(var_1.read_value(), expected[1], atol=2e-4)
 
-    def test_dense_sample(self):
-        self.run_dense_sample(
-            iterations=1,
-            expected=[[0.9552786425, 1.9105572849], [2.9400000012, 3.9200000016]],
-            optimizer=NovoGrad(lr=0.1, epsilon=1e-8),
-        )
-
     def test_sparse_sample(self):
         self.run_sparse_sample(
             iterations=1,
             expected=[[0.9552786425, 1.9105572849], [2.9400000012, 3.9200000016]],
             optimizer=NovoGrad(lr=0.1, epsilon=1e-8),
         )
 
-    def test_dense_sample_with_weight_decay(self):
-        self.run_dense_sample(
-            iterations=1,
-            expected=[[0.945278642, 1.8905572849], [2.9100000012, 3.8800000016]],
-            optimizer=NovoGrad(lr=0.1, weight_decay=0.1, epsilon=1e-8),
-        )
-
     def test_sparse_sample_with_weight_decay(self):
         self.run_sparse_sample(
             iterations=1,
             expected=[[0.945278642, 1.8905572849], [2.9100000012, 3.8800000016]],
             optimizer=NovoGrad(lr=0.1, weight_decay=0.1, epsilon=1e-8),
         )
 
-    def test_dense_sample_with_grad_averaging(self):
-        self.run_dense_sample(
-            iterations=2,
-            expected=[[0.9105572849, 1.8211145698], [2.8800000024, 3.8400000032]],
-            optimizer=NovoGrad(lr=0.1, grad_averaging=True, epsilon=1e-8),
-        )
-
     def test_sparse_sample_with_grad_averaging(self):
         self.run_sparse_sample(
             iterations=2,
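Only the dense tests migrate in this commit; the sparse-sample tests stay inside the graph-and-eager TestCase. For reference, the migrated dense path can be reproduced standalone in eager mode (a sketch using the diff's own expected values, not code from this commit):

    import tensorflow as tf
    from tensorflow_addons.optimizers import NovoGrad

    # One NovoGrad step on the first dense sample variable.
    var = tf.Variable([1.0, 2.0])
    grad = tf.constant([0.1, 0.2])
    opt = NovoGrad(lr=0.1, epsilon=1e-8)
    opt.apply_gradients([(grad, var)])
    print(var.numpy())  # approx. [0.9552786, 1.9105573], as in test_dense_sample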
@@ -132,9 +134,10 @@ def test_fit_simple_linear_model(self):
         max_abs_diff = np.max(np.abs(predicted - y))
         self.assertLess(max_abs_diff, 1e-2)
 
-    def test_get_config(self):
-        opt = NovoGrad(lr=1e-4, weight_decay=0.0, grad_averaging=False)
-        config = opt.get_config()
-        self.assertEqual(config["learning_rate"], 1e-4)
-        self.assertEqual(config["weight_decay"], 0.0)
-        self.assertEqual(config["grad_averaging"], False)
+
+def test_get_config():
+    opt = NovoGrad(lr=1e-4, weight_decay=0.0, grad_averaging=False)
+    config = opt.get_config()
+    assert config["learning_rate"] == 1e-4
+    assert config["weight_decay"] == 0.0
+    assert config["grad_averaging"] is False
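test_get_config likewise moves to module level, swapping self.assertEqual for plain pytest asserts on individual config keys. A natural follow-up check, not part of this diff, would be round-tripping the config through from_config, which Keras optimizers support:

    # Hypothetical extension: serialize and restore the optimizer config.
    opt = NovoGrad(lr=1e-4, weight_decay=0.0, grad_averaging=False)
    restored = NovoGrad.from_config(opt.get_config())
    assert restored.get_config() == opt.get_config()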
