Skip to content

Commit

Permalink
Expands Keras internal testing coverage for the new v3 saving format for common tests.
Browse files Browse the repository at this point in the history

PiperOrigin-RevId: 527732364
  • Loading branch information
nkovela1 authored and tensorflower-gardener committed Apr 28, 2023
1 parent d72829a commit e7c4d09
Show file tree
Hide file tree
Showing 7 changed files with 208 additions and 67 deletions.
17 changes: 17 additions & 0 deletions keras/engine/deferred_sequential_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,23 @@ def test_feature_extraction(self):
# Check that inputs and outputs are connected
_ = extractor(np.random.random((4, 6)))

@test_combinations.run_all_keras_modes(always_skip_v1=True)
def test_saving_keras_v3(self):
    """Round-trips a deferred Sequential model through the `.keras` v3 format.

    Builds the model with a dummy call, saves and reloads it, then checks
    that layer names and weight values survive the round trip.
    """
    model = get_model()
    model(np.random.random((3, 6)))  # Build model

    path = os.path.join(self.get_temp_dir(), "model_path.keras")
    model.save(path)
    new_model = keras.models.load_model(path)

    # Materialize the layer generators and compare counts first: a bare
    # zip() silently truncates to the shorter iterable, so a reload that
    # dropped layers (or weights) would otherwise pass undetected.
    model_layers = list(
        model._flatten_layers(include_self=True, recursive=False)
    )
    new_model_layers = list(
        new_model._flatten_layers(include_self=True, recursive=False)
    )
    self.assertEqual(len(model_layers), len(new_model_layers))
    for layer1, layer2 in zip(model_layers, new_model_layers):
        self.assertEqual(layer1.name, layer2.name)
        self.assertEqual(len(layer1.weights), len(layer2.weights))
        for w1, w2 in zip(layer1.weights, layer2.weights):
            self.assertAllClose(w1, w2)

@test_combinations.run_all_keras_modes(always_skip_v1=True)
def test_saving_savedmodel(self):
model = get_model()
Expand Down
23 changes: 22 additions & 1 deletion keras/engine/functional_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from keras.engine import input_layer as input_layer_lib
from keras.engine import sequential
from keras.engine import training as training_lib
from keras.saving import object_registration
from keras.saving.legacy import save
from keras.testing_infra import test_combinations
from keras.testing_infra import test_utils
Expand Down Expand Up @@ -1875,7 +1876,7 @@ def test_external_keras_serialization_compat_input_layers(self):
test_combinations.combine(mode=["graph", "eager"])
)
@test_utils.run_v2_only
def test_save_load_with_single_elem_list_inputs(self):
def test_save_load_with_single_elem_list_inputs_saved_model(self):
class MyLayer(layers.Layer):
def __init__(self):
super().__init__()
Expand All @@ -1893,6 +1894,26 @@ def call(self, inputs):

save.load_model("/tmp/km2")

@test_utils.run_v2_only
def test_save_load_with_single_elem_list_inputs_keras_v3(self):
    """Saves and reloads, via `.keras` v3, a model whose layer is called
    with a single-element list input and preserves that structure in its
    config."""

    @object_registration.register_keras_serializable()
    class MyLayer(layers.Layer):
        def __init__(self):
            super().__init__()
            # Opt in to keeping the `[x]` list structure in the config.
            self._preserve_input_structure_in_config = True

        def call(self, inputs):
            return inputs[0]

    x = input_layer_lib.Input(shape=(3,))
    y = MyLayer()([x])
    model = training_lib.Model(inputs=x, outputs=y)

    path = "/tmp/model.keras"
    model.save(path)
    models.load_model(path)

@test_combinations.generate(
test_combinations.combine(mode=["graph", "eager"])
)
Expand Down
50 changes: 41 additions & 9 deletions keras/engine/functional_utils_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,11 +151,6 @@ def test_build_model_from_intermediate_tensor(self):
model.fit(
np.random.randn(batch_size, 32), np.random.randn(batch_size, 16)
)
# Test for model saving
output_path = os.path.join(self.get_temp_dir(), "tf_keras_saved_model")
model.save(output_path, save_format="tf")
loaded_model = models.load_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())

# Also make sure the original inputs and y can still be used to build
# model
Expand All @@ -167,6 +162,27 @@ def test_build_model_from_intermediate_tensor(self):
self.assertIs(new_model.layers[1], layer1)
self.assertIs(new_model.layers[2], layer2)

# Test for model saving
with self.subTest("savedmodel"):
output_path = os.path.join(
self.get_temp_dir(), "tf_keras_saved_model"
)
model.save(output_path, save_format="tf")
loaded_model = models.load_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())

with self.subTest("keras_v3"):
if not tf.__internal__.tf2.enabled():
self.skipTest(
"TF2 must be enabled to use the new `.keras` saving."
)
output_path = os.path.join(
self.get_temp_dir(), "tf_keras_v3_model.keras"
)
model.save(output_path, save_format="keras_v3")
loaded_model = models.load_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())

def test_build_model_from_intermediate_tensor_with_complicated_model(self):
# The topology is like below:
# input1 -> dense1 -> a
Expand Down Expand Up @@ -212,10 +228,6 @@ def test_build_model_from_intermediate_tensor_with_complicated_model(self):
],
np.random.randn(batch_size, 8),
)
output_path = os.path.join(self.get_temp_dir(), "tf_keras_saved_model")
model.save(output_path, save_format="tf")
loaded_model = models.load_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())

model2 = models.Model([a, b], d)
# 2 input layers and 2 Add layer.
Expand All @@ -230,6 +242,26 @@ def test_build_model_from_intermediate_tensor_with_complicated_model(self):
np.random.randn(batch_size, 8),
)

with self.subTest("savedmodel"):
output_path = os.path.join(
self.get_temp_dir(), "tf_keras_saved_model"
)
model.save(output_path, save_format="tf")
loaded_model = models.load_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())

with self.subTest("keras_v3"):
if not tf.__internal__.tf2.enabled():
self.skipTest(
"TF2 must be enabled to use the new `.keras` saving."
)
output_path = os.path.join(
self.get_temp_dir(), "tf_keras_v3_model.keras"
)
model.save(output_path, save_format="keras_v3")
loaded_model = models.load_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())


if __name__ == "__main__":
tf.test.main()
74 changes: 47 additions & 27 deletions keras/layers/core/core_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def test_dropout_partial_noise_shape(self):
# Test that dropout mask is shared across second dim.
self.assertAllClose(out_np[:, 0, :], out_np[:, 1, :])

def test_dropout_with_savemodel(self):
def test_dropout_with_saving(self):
inputs = keras.Input(shape=(5, 10))
layer = keras.layers.Dropout(0.5, force_generator=True)
outputs = layer(inputs)
Expand All @@ -105,32 +105,52 @@ def test_dropout_with_savemodel(self):
# Make sure the layer does dropout value when training
self.assertNotAllClose(train, predict)

model.save(
os.path.join(self.get_temp_dir(), "savedmodel"), save_format="tf"
)
loaded_model = keras.models.load_model(
os.path.join(self.get_temp_dir(), "savedmodel")
)
predict2 = loaded_model(np.ones((20, 5, 10)))

self.assertAllClose(predict, predict2)
# Make sure the model dropout different value after loading
train2 = loaded_model(np.ones((20, 5, 10)), training=True)
self.assertNotAllClose(train, train2)
self.assertIsNotNone(loaded_model.layers[1]._random_generator)

# Also make sure the checkpoint doesn't contain any variable from the
# dropout layer, to keep the backward compatibility.
checkpoint = tf.train.Checkpoint(model)
save_path = checkpoint.save(
os.path.join(self.get_temp_dir(), "checkpoint")
)
checkpoint_var_names = [
name_value_tuple[0]
for name_value_tuple in tf.train.list_variables(save_path)
]
for name in checkpoint_var_names:
self.assertNotIn("dropout", name)
with self.subTest("savedmodel"):
model.save(
os.path.join(self.get_temp_dir(), "savedmodel"),
save_format="tf",
)
loaded_model = keras.models.load_model(
os.path.join(self.get_temp_dir(), "savedmodel")
)
predict2 = loaded_model(np.ones((20, 5, 10)))

self.assertAllClose(predict, predict2)
# Make sure the model dropout different value after loading
train2 = loaded_model(np.ones((20, 5, 10)), training=True)
self.assertNotAllClose(train, train2)
self.assertIsNotNone(loaded_model.layers[1]._random_generator)

with self.subTest("keras_v3"):
if not tf.__internal__.tf2.enabled():
self.skipTest(
"TF2 must be enabled to use the new `.keras` saving."
)
model.save(os.path.join(self.get_temp_dir(), "model.keras"))
loaded_model = keras.models.load_model(
os.path.join(self.get_temp_dir(), "model.keras")
)
predict2 = loaded_model(np.ones((20, 5, 10)))

self.assertAllClose(predict, predict2)
# Make sure the model dropout different value after loading
train2 = loaded_model(np.ones((20, 5, 10)), training=True)
self.assertNotAllClose(train, train2)
self.assertIsNotNone(loaded_model.layers[1]._random_generator)

with self.subTest("checkpoint"):
# Also make sure the checkpoint doesn't contain any variable from
# the dropout layer, to keep the backward compatibility.
checkpoint = tf.train.Checkpoint(model)
save_path = checkpoint.save(
os.path.join(self.get_temp_dir(), "checkpoint")
)
checkpoint_var_names = [
name_value_tuple[0]
for name_value_tuple in tf.train.list_variables(save_path)
]
for name in checkpoint_var_names:
self.assertNotIn("dropout", name)


@test_combinations.run_all_keras_modes
Expand Down
19 changes: 14 additions & 5 deletions keras/models/sharpness_aware_minimization_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,12 +109,21 @@ def test_save_sam(self):

sam_model.fit(data, label)

path = os.path.join(self.get_temp_dir(), "model")
sam_model.save(path)
loaded_sam_model = keras.models.load_model(path)
loaded_sam_model.load_weights(path)
with self.subTest("savedmodel"):
path = os.path.join(self.get_temp_dir(), "model")
sam_model.save(path)
loaded_sam_model = keras.models.load_model(path)
loaded_sam_model.load_weights(path)

self.assertAllClose(sam_model(data), loaded_sam_model(data))
self.assertAllClose(sam_model(data), loaded_sam_model(data))

with self.subTest("keras_v3"):
path = os.path.join(self.get_temp_dir(), "model.keras")
sam_model.save(path)
loaded_sam_model = keras.models.load_model(path)
loaded_sam_model.load_weights(path)

self.assertAllClose(sam_model(data), loaded_sam_model(data))

def test_checkpoint_sam(self):
model = keras.Sequential(
Expand Down
11 changes: 11 additions & 0 deletions keras/optimizers/optimizer_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -527,6 +527,17 @@ def testSaveAndLoadOptimizerWithModel(self, optimizer_fn):
loaded_optimizer.build(loaded_model.trainable_variables)
self.assertAllClose(optimizer.variables, loaded_optimizer.variables)

# Save in `.keras` format.
path = os.path.join(self.get_temp_dir(), "model.keras")
model.save(path)
loaded_model = keras.models.load_model(path)
loaded_model.load_weights(path)
loaded_optimizer = loaded_model.optimizer
self.assertEqual(type(optimizer), type(loaded_optimizer))
self.assertEqual(loaded_optimizer.learning_rate, 0.002)
self.assertEqual(loaded_optimizer.clipnorm, 0.1)
self.assertAllClose(optimizer.variables, loaded_optimizer.variables)

@parameterized.product(optimizer_fn=OPTIMIZER_FN)
def testSparseGradientsWorkAsExpected(self, optimizer_fn):
optimizer_1 = optimizer_fn()
Expand Down
Loading

0 comments on commit e7c4d09

Please sign in to comment.