
Commit 0de6c2f (1 parent: 3e6b4b8)

FIX: Use all keras optimizers (#273)

* FIX: Use all keras optimizers
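
The change is the same in all three test files: optimizers are referenced through the fully qualified tf.keras.optimizers path instead of the shorter tf.optimizers spelling. A minimal sketch of the substitution pattern, assuming a TF 2.x-style API where the Keras optimizer classes live under tf.keras.optimizers:

    import tensorflow as tf

    # Before (short spelling used previously in the tests):
    # optimizer = tf.optimizers.Adam(1.0)

    # After: fully qualified Keras path, as used throughout this commit.
    optimizer = tf.keras.optimizers.Adam(1.0)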

3 files changed: 4 additions (+), 3 deletions (−)

tensorflow_addons/image/dense_image_warp_test.py (1 addition, 1 deletion)

@@ -204,7 +204,7 @@ def test_gradients_exist(self):
         interp = dense_image_warp(image, flows)
         loss = tf.math.reduce_mean(tf.math.square(interp - image))

-        optimizer = tf.optimizers.Adam(1.0)
+        optimizer = tf.keras.optimizers.Adam(1.0)
         grad = tf.gradients(loss, [flows])
         opt_func = optimizer.apply_gradients(zip(grad, [flows]))
         init_op = tf.compat.v1.global_variables_initializer()
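
For reference, a minimal eager-mode sketch of the same optimizer call; the test itself builds a graph with tf.gradients and a v1 initializer, and the variable and loss below are illustrative stand-ins, not the test's dense_image_warp setup.

    import tensorflow as tf

    # Illustrative variable and loss (the real test warps an image with flow
    # variables and takes the mean squared difference).
    flows = tf.Variable(tf.ones([1, 4, 4, 2]))

    optimizer = tf.keras.optimizers.Adam(1.0)
    with tf.GradientTape() as tape:
        loss = tf.math.reduce_mean(tf.math.square(flows))
    grads = tape.gradient(loss, [flows])
    optimizer.apply_gradients(zip(grads, [flows]))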

tensorflow_addons/layers/wrappers_test.py (2 additions, 1 deletion)

@@ -66,7 +66,8 @@ def test_weightnorm_conv2d(self):

         model.add(tf.keras.layers.Activation('relu'))
         model.compile(
-            optimizer=tf.optimizers.RMSprop(learning_rate=0.001), loss='mse')
+            optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001),
+            loss='mse')
         model.fit(
             np.random.random((2, 4, 4, 3)),
             np.random.random((2, 4, 4, 5)),
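
A self-contained sketch of the compile/fit pattern touched by this hunk; the layer stack here is illustrative (judging by the test name, the real model wraps a Conv2D in a weight-normalization wrapper), and only the optimizer and loss arguments mirror the diff.

    import numpy as np
    import tensorflow as tf

    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Conv2D(5, (2, 2), padding='same',
                                     input_shape=(4, 4, 3)))
    model.add(tf.keras.layers.Activation('relu'))
    model.compile(
        optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.001),
        loss='mse')
    model.fit(np.random.random((2, 4, 4, 3)),
              np.random.random((2, 4, 4, 5)),
              epochs=1, verbose=0)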

tensorflow_addons/optimizers/weight_decay_optimizers_test.py (1 addition, 1 deletion)

@@ -269,7 +269,7 @@ class ExtendWithWeightDecayTest(SGDWTest):
         """Verify that the factory function SGDW is the same as SGDW."""

         optimizer = weight_decay_optimizers.extend_with_decoupled_weight_decay(
-            tf.optimizers.SGD)
+            tf.keras.optimizers.SGD)


 if __name__ == "__main__":
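
A minimal usage sketch of the factory call exercised by this test, assuming the module path tensorflow_addons.optimizers.weight_decay_optimizers from the file under test, and assuming the returned class accepts weight_decay alongside the base SGD constructor arguments:

    import tensorflow as tf
    from tensorflow_addons.optimizers import weight_decay_optimizers

    # Wrap the plain Keras SGD class with decoupled weight decay; the factory
    # returns a new optimizer class rather than an instance.
    SGDW = weight_decay_optimizers.extend_with_decoupled_weight_decay(
        tf.keras.optimizers.SGD)
    optimizer = SGDW(weight_decay=1e-4, learning_rate=0.01, momentum=0.9)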
