From d43f5b03c74d2963272bbf89b8520b43f84dd8e6 Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Fri, 24 May 2024 22:22:09 +0300
Subject: [PATCH 1/7] make Model2onnx agnostic to the type of model used by tensorflow (and not limit it to .h5 models)

---
 mltu/tensorflow/callbacks.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/mltu/tensorflow/callbacks.py b/mltu/tensorflow/callbacks.py
index ea9479e..954b002 100644
--- a/mltu/tensorflow/callbacks.py
+++ b/mltu/tensorflow/callbacks.py
@@ -1,5 +1,6 @@
 import os
 import tensorflow as tf
+from pathlib import Path
 from keras.callbacks import Callback
 
 import logging
@@ -14,7 +15,7 @@ def __init__(
     ) -> None:
         """ Converts the model to onnx format after training is finished.
         Args:
-            saved_model_path (str): Path to the saved .h5 model.
+            saved_model_path (str): Path to the saved model.
             metadata (dict, optional): Dictionary containing metadata to be added to the onnx model. Defaults to None.
             save_on_epoch_end (bool, optional): Save the onnx model on every epoch end. Defaults to False.
         """
@@ -73,7 +74,7 @@ def on_epoch_end(self, epoch: int, logs: dict=None):
 
     def on_train_end(self, logs=None):
         """ Converts the model to onnx format after training is finished. """
         self.model.load_weights(self.saved_model_path)
-        onnx_model_path = self.saved_model_path.replace(".h5", ".onnx")
+        onnx_model_path = str(Path(self.saved_model_path).with_suffix('.onnx'))
         self.model2onnx(self.model, onnx_model_path)
         self.include_metadata(onnx_model_path, self.metadata)

From a0863704ab5e482bec75abe35eeda14aa0018b68 Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Sun, 16 Mar 2025 18:21:04 +0200
Subject: [PATCH 2/7] Fixed issue with checking length on symbolic tensor

---
 mltu/tensorflow/transformer/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mltu/tensorflow/transformer/utils.py b/mltu/tensorflow/transformer/utils.py
index 471f92e..461ae15 100644
--- a/mltu/tensorflow/transformer/utils.py
+++ b/mltu/tensorflow/transformer/utils.py
@@ -151,7 +151,7 @@ def update_state(self, y_true, y_pred, sample_weight=None):
         self.cer_accumulator.assign_add(tf.reduce_sum(distance))
 
         # Increment the batch_counter by the batch size
-        self.batch_counter.assign_add(len(y_true))
+        self.batch_counter.assign_add(y_true.shape[0])
 
     def result(self):
         """ Computes and returns the metric result.
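Patch 1 above replaces the hard-coded .replace(".h5", ".onnx") with a pathlib-based suffix swap, so the callback also produces a sensible ONNX path for ".keras" or other model formats. A minimal sketch of the difference, using illustrative file names that are not taken from the patch:

    from pathlib import Path

    def onnx_path_old(saved_model_path: str) -> str:
        # Old behaviour: only an explicit ".h5" suffix is rewritten,
        # so a ".keras" model path would pass through unchanged.
        return saved_model_path.replace(".h5", ".onnx")

    def onnx_path_new(saved_model_path: str) -> str:
        # New behaviour: whatever suffix is present is replaced.
        return str(Path(saved_model_path).with_suffix(".onnx"))

    print(onnx_path_old("model.keras"))  # -> model.keras (unchanged)
    print(onnx_path_new("model.keras"))  # -> model.onnx
    print(onnx_path_new("model.h5"))     # -> model.onnx

Patch 2 is analogous in spirit: len(y_true) is not defined for a symbolic tensor inside a compiled graph, so the metric now reads the batch size from y_true.shape[0] instead.
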
From 9fbd7dd3a90485bf140122f8200ffecfe032cac7 Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Sun, 16 Mar 2025 18:31:33 +0200
Subject: [PATCH 3/7] fixed signature

---
 mltu/preprocessors.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mltu/preprocessors.py b/mltu/preprocessors.py
index cb65ca1..9d9bb29 100644
--- a/mltu/preprocessors.py
+++ b/mltu/preprocessors.py
@@ -18,7 +18,7 @@
 class ImageReader:
     """Read image from path and return image and label"""
 
-    def __init__(self, image_class: Image, log_level: int = logging.INFO, ) -> None:
+    def __init__(self, image_class: type[Image], log_level: int = logging.INFO) -> None:
         self.logger = logging.getLogger(self.__class__.__name__)
         self.logger.setLevel(log_level)
         self._image_class = image_class

From 35b7669c478f19cc92d97d4f4ef581b2d8a362c5 Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Sun, 16 Mar 2025 20:35:26 +0200
Subject: [PATCH 4/7] fixed signature

---
 mltu/preprocessors.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/mltu/preprocessors.py b/mltu/preprocessors.py
index 9d9bb29..206aab2 100644
--- a/mltu/preprocessors.py
+++ b/mltu/preprocessors.py
@@ -6,6 +6,8 @@
 import matplotlib
 import logging
 
+from typing import Type
+
 from . import Image
 from mltu.annotations.audio import Audio

From 1651e4055bd4f972d100e794ece8ce816036ff2a Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Sun, 16 Mar 2025 20:35:41 +0200
Subject: [PATCH 5/7] fixed signature

---
 mltu/preprocessors.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mltu/preprocessors.py b/mltu/preprocessors.py
index 206aab2..acebe7d 100644
--- a/mltu/preprocessors.py
+++ b/mltu/preprocessors.py
@@ -20,7 +20,7 @@
 class ImageReader:
     """Read image from path and return image and label"""
 
-    def __init__(self, image_class: type[Image], log_level: int = logging.INFO) -> None:
+    def __init__(self, image_class: Type[Image], log_level: int = logging.INFO) -> None:
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.setLevel(log_level)
        self._image_class = image_class

From f82c9508e77dea0ed0095a1e3d2b7fac5debc107 Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Sun, 16 Mar 2025 20:57:19 +0200
Subject: [PATCH 6/7] fixed Model2onnx callback

---
 mltu/tensorflow/callbacks.py | 38 +++++++++++++++---------------------
 1 file changed, 16 insertions(+), 22 deletions(-)

diff --git a/mltu/tensorflow/callbacks.py b/mltu/tensorflow/callbacks.py
index 954b002..cd8f058 100644
--- a/mltu/tensorflow/callbacks.py
+++ b/mltu/tensorflow/callbacks.py
@@ -36,35 +36,29 @@ def __init__(
 
     @staticmethod
     def model2onnx(model: tf.keras.Model, onnx_model_path: str):
-        try:
-            import tf2onnx
-
-            # convert the model to onnx format
-            tf2onnx.convert.from_keras(model, output_path=onnx_model_path)
+        import tf2onnx
 
-        except Exception as e:
-            print(e)
+        # convert the model to onnx format
+        input_signature = [tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype, name='digit')]
+        tf2onnx.convert.from_keras(model, input_signature=input_signature, opset=13, output_path=onnx_model_path)
 
     @staticmethod
     def include_metadata(onnx_model_path: str, metadata: dict=None):
-        try:
-            if metadata and isinstance(metadata, dict):
+        if metadata and isinstance(metadata, dict):
 
-                import onnx
-                # Load the ONNX model
-                onnx_model = onnx.load(onnx_model_path)
+            import onnx
+            # Load the ONNX model
+            onnx_model = onnx.load(onnx_model_path)
 
-                # Add the metadata dictionary to the model's metadata_props attribute
-                for key, value in metadata.items():
-                    meta = onnx_model.metadata_props.add()
-                    meta.key = key
-                    meta.value = str(value)
+            # Add the metadata dictionary to the model's metadata_props attribute
+            for key, value in metadata.items():
+                meta = onnx_model.metadata_props.add()
+                meta.key = key
+                meta.value = str(value)
 
-                # Save the modified ONNX model
-                onnx.save(onnx_model, onnx_model_path)
+            # Save the modified ONNX model
+            onnx.save(onnx_model, onnx_model_path)
 
-        except Exception as e:
-            print(e)
 
     def on_epoch_end(self, epoch: int, logs: dict=None):
         """ Converts the model to onnx format on every epoch end. """
@@ -73,7 +67,7 @@ def on_epoch_end(self, epoch: int, logs: dict=None):
 
     def on_train_end(self, logs=None):
         """ Converts the model to onnx format after training is finished. """
-        self.model.load_weights(self.saved_model_path)
+        self._model.load_weights(self.saved_model_path)
         onnx_model_path = str(Path(self.saved_model_path).with_suffix('.onnx'))
         self.model2onnx(self.model, onnx_model_path)
         self.include_metadata(onnx_model_path, self.metadata)

From 0c388164f5402b03424d45fb8c2515c427a19678 Mon Sep 17 00:00:00 2001
From: seidnerj
Date: Sun, 16 Mar 2025 21:12:00 +0200
Subject: [PATCH 7/7] fixed Model2onnx callback

---
 mltu/tensorflow/callbacks.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mltu/tensorflow/callbacks.py b/mltu/tensorflow/callbacks.py
index cd8f058..0c67e56 100644
--- a/mltu/tensorflow/callbacks.py
+++ b/mltu/tensorflow/callbacks.py
@@ -39,6 +39,7 @@ def model2onnx(model: tf.keras.Model, onnx_model_path: str):
         import tf2onnx
 
         # convert the model to onnx format
+        # NOTE: see here for more info https://github.com/keras-team/keras/issues/18430
         input_signature = [tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype, name='digit')]
         tf2onnx.convert.from_keras(model, input_signature=input_signature, opset=13, output_path=onnx_model_path)
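
Patches 6 and 7 change the conversion to pass an explicit input signature and opset to tf2onnx, with the added comment pointing at keras-team/keras issue 18430; patch 6 also drops the try/except wrappers, so conversion and metadata errors now surface instead of only being printed. A minimal, self-contained sketch of the same call shape against a toy model (the model definition and output path below are illustrative, not part of the patches):

    import tensorflow as tf
    import tf2onnx

    # Toy stand-in for the trained Keras model; "digit" mirrors the input
    # name hard-coded in the patched callback.
    inputs = tf.keras.Input(shape=(28, 28, 1), name="digit")
    outputs = tf.keras.layers.Dense(10, activation="softmax")(
        tf.keras.layers.Flatten()(inputs))
    model = tf.keras.Model(inputs, outputs)

    # Build the input signature from the model's first input, as the patched
    # callback does, instead of letting tf2onnx infer it from the model alone.
    input_signature = [tf.TensorSpec(model.inputs[0].shape,
                                     model.inputs[0].dtype, name="digit")]
    tf2onnx.convert.from_keras(model, input_signature=input_signature,
                               opset=13, output_path="model.onnx")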