From f4fd1063a71d408b10b79c97793753f3b659cd3e Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 11 Oct 2022 13:52:43 -0700
Subject: [PATCH] Add helper methods to load saved model from external files
 in model maker.

PiperOrigin-RevId: 480444918
---
 .../python/core/utils/model_util.py      | 26 +++++++++++++++++++
 .../python/core/utils/model_util_test.py | 19 +++++++++++---
 .../python/core/utils/test_util.py       | 18 +++++++++++++
 3 files changed, 59 insertions(+), 4 deletions(-)

diff --git a/mediapipe/model_maker/python/core/utils/model_util.py b/mediapipe/model_maker/python/core/utils/model_util.py
index 4914fea57..8962f2868 100644
--- a/mediapipe/model_maker/python/core/utils/model_util.py
+++ b/mediapipe/model_maker/python/core/utils/model_util.py
@@ -26,6 +26,7 @@ from typing import Any, Callable, Dict, List, Optional, Text, Tuple, Union
 import numpy as np
 import tensorflow as tf
 
+# resources dependency
 from mediapipe.model_maker.python.core.data import dataset
 from mediapipe.model_maker.python.core.utils import quantization
 
@@ -33,6 +34,31 @@ DEFAULT_SCALE, DEFAULT_ZERO_POINT = 0, 0
 ESTIMITED_STEPS_PER_EPOCH = 1000
 
 
+def load_keras_model(model_path: str,
+                     compile_on_load: bool = False) -> tf.keras.Model:
+  """Loads a TensorFlow Keras model from file and returns the Keras model.
+
+  Args:
+    model_path: Relative path to a directory containing model data, such as
+      /saved_model/.
+    compile_on_load: Whether the model should be compiled while loading. If
+      False, the returned model has to be compiled with the appropriate loss
+      function and custom metrics before running inference on a test
+      dataset.
+
+  Returns:
+    A TensorFlow Keras model.
+  """
+  # Extract the file path before mediapipe/ as the `base_dir`. Joining it with
+  # `model_path`, which defines the relative path under mediapipe/, yields the
+  # absolute path of the model files directory.
+  cwd = os.path.dirname(__file__)
+  base_dir = cwd[:cwd.rfind('mediapipe')]
+  absolute_path = os.path.join(base_dir, model_path)
+  return tf.keras.models.load_model(
+      absolute_path, custom_objects={'tf': tf}, compile=compile_on_load)
+
+
 def get_steps_per_epoch(steps_per_epoch: Optional[int] = None,
                         batch_size: Optional[int] = None,
                         train_data: Optional[dataset.Dataset] = None) -> int:
diff --git a/mediapipe/model_maker/python/core/utils/model_util_test.py b/mediapipe/model_maker/python/core/utils/model_util_test.py
index 9c3908841..ce31c1877 100644
--- a/mediapipe/model_maker/python/core/utils/model_util_test.py
+++ b/mediapipe/model_maker/python/core/utils/model_util_test.py
@@ -15,7 +15,6 @@
 import os
 
 from absl.testing import parameterized
-import numpy as np
 import tensorflow as tf
 
 from mediapipe.model_maker.python.core.utils import model_util
@@ -25,6 +24,18 @@ from mediapipe.model_maker.python.core.utils import test_util
 
 class ModelUtilTest(tf.test.TestCase, parameterized.TestCase):
 
+  def test_load_model(self):
+    input_dim = 4
+    model = test_util.build_model(input_shape=[input_dim], num_classes=2)
+    saved_model_path = os.path.join(self.get_temp_dir(), 'saved_model')
+    model.save(saved_model_path)
+    loaded_model = model_util.load_keras_model(saved_model_path)
+
+    input_tensors = test_util.create_random_sample(size=[1, input_dim])
+    model_output = model.predict_on_batch(input_tensors)
+    loaded_model_output = loaded_model.predict_on_batch(input_tensors)
+    self.assertTrue((model_output == loaded_model_output).all())
+
   @parameterized.named_parameters(
       dict(
           testcase_name='input_only_steps_per_epoch',
@@ -124,9 +135,9 @@ class ModelUtilTest(tf.test.TestCase, parameterized.TestCase):
                                 input_dim: int,
                                 max_input_value: int = 1000,
                                 atol: float = 1e-04):
-    np.random.seed(0)
-    random_input = np.random.uniform(
-        low=0, high=max_input_value, size=(1, input_dim)).astype(np.float32)
+    random_input = test_util.create_random_sample(
+        size=[1, input_dim], high=max_input_value)
+    random_input = tf.convert_to_tensor(random_input)
 
     self.assertTrue(
         test_util.is_same_output(
diff --git a/mediapipe/model_maker/python/core/utils/test_util.py b/mediapipe/model_maker/python/core/utils/test_util.py
index eb2952dd3..cac2a0e1f 100644
--- a/mediapipe/model_maker/python/core/utils/test_util.py
+++ b/mediapipe/model_maker/python/core/utils/test_util.py
@@ -46,6 +46,24 @@ def create_dataset(data_size: int,
   return dataset
 
 
+def create_random_sample(size: Union[int, List[int]],
+                         low: float = 0,
+                         high: float = 1) -> np.ndarray:
+  """Creates and returns a random sample with floating point values.
+
+  Args:
+    size: Size of the output multi-dimensional array.
+    low: Lower boundary of the output values.
+    high: Upper boundary of the output values.
+
+  Returns:
+    A 1D array if `size` is a scalar; otherwise an N-D array whose shape
+    matches `size`.
+  """
+  np.random.seed(0)
+  return np.random.uniform(low=low, high=high, size=size).astype(np.float32)
+
+
 def build_model(input_shape: List[int], num_classes: int) -> tf.keras.Model:
   """Builds a simple Keras model for test."""
   inputs = tf.keras.layers.Input(shape=input_shape)
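
Usage note (not part of the patch): a minimal sketch of how the two new
helpers might be exercised together, closely following the test_load_model
case above. The temp-directory path obtained via tempfile.mkdtemp() is an
assumption for illustration; load_keras_model resolves model_path relative to
the directory containing mediapipe/, but an absolute path passes through
os.path.join unchanged, which is what the unit test itself relies on.

    import tempfile

    from mediapipe.model_maker.python.core.utils import model_util
    from mediapipe.model_maker.python.core.utils import test_util

    # Build a tiny classifier and save it as a SavedModel directory.
    model = test_util.build_model(input_shape=[4], num_classes=2)
    saved_model_dir = tempfile.mkdtemp()
    model.save(saved_model_dir)

    # Reload through the new helper. compile_on_load defaults to False, so the
    # returned model must be compiled before evaluate()/fit(); predict_on_batch
    # works without compiling.
    loaded = model_util.load_keras_model(saved_model_dir)

    # create_random_sample seeds NumPy, so the sample is deterministic.
    sample = test_util.create_random_sample(size=[1, 4])
    assert (model.predict_on_batch(sample) ==
            loaded.predict_on_batch(sample)).all()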