[circle-mlir/tools-test] Introduce gen-onnx with Add models #14732

Merged · 1 commit · Feb 25, 2025
2 changes: 2 additions & 0 deletions circle-mlir/circle-mlir/tools-test/CMakeLists.txt
@@ -1,2 +1,4 @@
# check gtest is OK
add_subdirectory(check-gtest)
# generate onnx models
add_subdirectory(gen-onnx)
85 changes: 85 additions & 0 deletions circle-mlir/circle-mlir/tools-test/gen-onnx/CMakeLists.txt
@@ -0,0 +1,85 @@
# python3 venv folder
# NOTE Docker image for CI doesn't use venv
set(VENV_PATH "${CMAKE_SOURCE_DIR}/infra/overlay/venv")

# Copy test scripts
unset(GEN_SCRIPT_DEPS )
macro(COPY_SCRIPT FILENAME)
set(SCRIPT_SRC "${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}")
set(SCRIPT_DST "${CMAKE_CURRENT_BINARY_DIR}/${FILENAME}")
add_custom_command(
OUTPUT ${SCRIPT_DST}
COMMAND ${CMAKE_COMMAND} -E copy "${SCRIPT_SRC}" "${SCRIPT_DST}"
DEPENDS ${SCRIPT_SRC}
COMMENT "gen-onnx: prepare ${FILENAME}"
)
list(APPEND GEN_SCRIPT_DEPS "${SCRIPT_DST}")
endmacro(COPY_SCRIPT)

COPY_SCRIPT(run_gen_onnx.sh)
COPY_SCRIPT(run_gen_onnx.py)

# Models folder for unit testing
set(PYTORCH_UNIT_PATH "${CMAKE_SOURCE_DIR}/models/unit")
set(PYTORCH_NET_PATH "${CMAKE_SOURCE_DIR}/models/net")
Comment on lines +23 to +24
Contributor


Is models/net going to be introduced later?

I'd like to understand the difference between units and net. How do we determine which module belongs to units or net?

Contributor Author


Is models/net going to be introduced later?

Yes

difference between units and net

unit is for a single Op; net is for multiple Ops, i.e. a network of Ops.
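
To illustrate the distinction, a hypothetical models/net module (not part of this PR; the class name and Op combination below are assumptions) would follow the same __init__.py convention as the unit models, but chain several Ops into one small network:

import torch


# Hypothetical "net" model: Add followed by Mul, i.e. multiple Ops forming a small network
# (illustrative sketch only; not one of the modules added in this PR)
class net_add_mul(torch.nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, inputs):
        added = torch.add(inputs[0], inputs[1])
        return torch.mul(added, inputs[2])

    def onnx_opset_version(self):
        return 10


_model_ = net_add_mul()

_inputs_ = [torch.randn(1, 2, 3, 3), torch.randn(1, 2, 3, 3), torch.randn(1, 2, 3, 3)]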


# Pytorch script files
unset(PYTORCH_UNIT_ITEMS )
# gather all the names in /models/unit
file(GLOB PYTORCH_MODELS_ITEMS RELATIVE ${PYTORCH_UNIT_PATH} ${PYTORCH_UNIT_PATH}/*)
# for each name, if the item is a folder, add to the list
foreach(PYTORCH_ITEM IN ITEMS ${PYTORCH_MODELS_ITEMS})
if(IS_DIRECTORY ${PYTORCH_UNIT_PATH}/${PYTORCH_ITEM})
if(EXISTS ${PYTORCH_UNIT_PATH}/${PYTORCH_ITEM}/__init__.py)
list(APPEND PYTORCH_UNIT_ITEMS ${PYTORCH_ITEM})
endif()
endif()
endforeach()

unset(PYTORCH_MODELS_ITEMS)
unset(PYTORCH_NET_ITEMS )
# gather all the names in /models/net
file(GLOB PYTORCH_MODELS_ITEMS RELATIVE ${PYTORCH_NET_PATH} ${PYTORCH_NET_PATH}/*)
# for each name, if the item is a folder, add to the list
foreach(PYTORCH_ITEM IN ITEMS ${PYTORCH_MODELS_ITEMS})
if(IS_DIRECTORY ${PYTORCH_NET_PATH}/${PYTORCH_ITEM})
if(EXISTS ${PYTORCH_NET_PATH}/${PYTORCH_ITEM}/__init__.py)
list(APPEND PYTORCH_NET_ITEMS ${PYTORCH_ITEM})
endif()
endif()
endforeach()

unset(GEN_FILES_DEPS)
# for each item in the list, run run_gen_onnx to generate an onnx model
foreach(PYTORCH_ITEM IN ITEMS ${PYTORCH_UNIT_ITEMS})
set(ONNX_FILE ${PYTORCH_ITEM}.onnx)
add_custom_command(OUTPUT ${ONNX_FILE}
COMMAND bash run_gen_onnx.sh
${VENV_PATH} ${PYTORCH_UNIT_PATH} ${PYTORCH_ITEM} ${ONNX_FILE}
DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/run_gen_onnx.sh
${CMAKE_CURRENT_BINARY_DIR}/run_gen_onnx.py
${PYTORCH_UNIT_PATH}/${PYTORCH_ITEM}/__init__.py
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generate ${ONNX_FILE}"
)
list(APPEND GEN_FILES_DEPS ${ONNX_FILE})
endforeach()

foreach(PYTORCH_ITEM IN ITEMS ${PYTORCH_NET_ITEMS})
set(ONNX_FILE ${PYTORCH_ITEM}.onnx)
add_custom_command(OUTPUT ${ONNX_FILE}
COMMAND bash run_gen_onnx.sh
${VENV_PATH} ${PYTORCH_NET_PATH} ${PYTORCH_ITEM} ${ONNX_FILE}
DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/run_gen_onnx.sh
${CMAKE_CURRENT_BINARY_DIR}/run_gen_onnx.py
${PYTORCH_NET_PATH}/${PYTORCH_ITEM}/__init__.py
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generate ${ONNX_FILE}"
)
list(APPEND GEN_FILES_DEPS ${ONNX_FILE})
endforeach()

# gen_onnx_target is used to establish the build dependency
add_custom_target(gen_onnx_target ALL DEPENDS ${GEN_SCRIPT_DEPS} ${GEN_FILES_DEPS})

set(GEN_ONNX_PATH ${CMAKE_CURRENT_BINARY_DIR} PARENT_SCOPE)
64 changes: 64 additions & 0 deletions circle-mlir/circle-mlir/tools-test/gen-onnx/run_gen_onnx.py
@@ -0,0 +1,64 @@
# Copyright (c) 2025 Samsung Electronics Co., Ltd. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
import importlib
import sys

from pathlib import Path


def generate_onnx(models_root, model_name, onnx_file):
sys.path.append(models_root)
module = importlib.import_module(model_name)

# default: refer https://github.com/pytorch/pytorch/blob/master/torch/onnx/utils.py
# and https://github.com/pytorch/pytorch/blob/master/torch/onnx/_constants.py
# and https://github.com/pytorch/pytorch/blob/master/tools/onnx/update_default_opset_version.py
opset_version = 14
if hasattr(module._model_, 'onnx_opset_version'):
opset_version = module._model_.onnx_opset_version()

m_keys = module.__dict__.keys()

if '_io_names_' in m_keys and '_dynamic_axes_' in m_keys:
# refer https://github.com/onnx/onnx/issues/654#issuecomment-521233285
# purpose of this is to set dynamic shape for inputs or outputs
# magic(?) is to set input/output names, and then set dynamic shape by name/dim
# example) set output dim(0) as unknown
# _io_names_ = [['input'], ['output']]
# _dynamic_axes_ = {'output': {0: '?'}}
torch.onnx.export(module._model_,
module._inputs_,
onnx_file,
input_names=module._io_names_[0],
output_names=module._io_names_[1],
dynamic_axes=module._dynamic_axes_,
opset_version=opset_version)
else:
torch.onnx.export(module._model_,
module._inputs_,
onnx_file,
opset_version=opset_version)

if hasattr(module._model_, 'post_process'):
module._model_.post_process(onnx_file)


if __name__ == "__main__":
if len(sys.argv) != 4:
thispath = Path(sys.argv[0])
sys.exit("Usage: " + thispath.name + " [models_root] [model_name] [onnx_file]")

generate_onnx(sys.argv[1], sys.argv[2], sys.argv[3])
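
For reference, a model module that takes the dynamic-shape path above only needs to define the two module-level attributes that run_gen_onnx.py looks for; a minimal sketch under the same conventions as the unit models in this PR (the module itself is hypothetical):

import torch


# Hypothetical module marking output dim(0) as dynamic via _io_names_/_dynamic_axes_
# (illustrative sketch; not one of the modules added in this PR)
class net_add(torch.nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, inputs):
        return torch.add(inputs[0], inputs[1])


_model_ = net_add()

_inputs_ = [torch.randn(1, 2, 3, 3), torch.randn(1, 2, 3, 3)]

# picked up by run_gen_onnx.py: export with named inputs/outputs and a dynamic dim(0) on the output
_io_names_ = [['input_0', 'input_1'], ['output']]
_dynamic_axes_ = {'output': {0: '?'}}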
48 changes: 48 additions & 0 deletions circle-mlir/circle-mlir/tools-test/gen-onnx/run_gen_onnx.sh
@@ -0,0 +1,48 @@
#!/bin/bash

# This script executes the run_gen_onnx.py file to generate an ONNX model
#
# HOW TO USE
#
# ./run_gen_onnx.sh <path/to/venv_dir> <path/to/models> <model_name> <onnx_name>
# venv_dir : python virtual environment home directory
# models : path where python modules exist
# model_name : name of model
# onnx_name : name of onnx file

THIS_SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PY_SCRIPT_PATH="${THIS_SCRIPT_PATH}/run_gen_onnx.py"

VENV_PATH="$1"; shift
MODELS_ROOT_PATH="$1"; shift
MODEL_NAME="$1"; shift
ONNX_NAME="$1"; shift

PASSED_TAG="${ONNX_NAME}.passed"
GENERATE_LOG="${ONNX_NAME}.log"
rm -f "${PASSED_TAG}"

cat > "${GENERATE_LOG}" <(
exec 2>&1
set -ex

# NOTE enter venv if it exists
if [[ -f "${VENV_PATH}/bin/activate" ]]; then
source "${VENV_PATH}/bin/activate"
fi

"python3" "${PY_SCRIPT_PATH}" "${MODELS_ROOT_PATH}" "${MODEL_NAME}" "${ONNX_NAME}"
if [[ $? -eq 0 ]]; then
touch "${PASSED_TAG}"
fi

if [[ -f "${VENV_PATH}/bin/activate" ]]; then
deactivate
fi
)

if [[ ! -f "${PASSED_TAG}" ]]; then
exit 255
fi
rm -f "${PASSED_TAG}"
exit 0
19 changes: 19 additions & 0 deletions circle-mlir/models/unit/Add_F32_R4/__init__.py
@@ -0,0 +1,19 @@
import torch


# Generate Add operator with Float32, Rank-4
class net_add(torch.nn.Module):
def __init__(self):
super().__init__()

def forward(self, inputs):
return torch.add(inputs[0], inputs[1])

def onnx_opset_version(self):
# TODO set version
return 10


_model_ = net_add()

_inputs_ = [torch.randn(1, 2, 3, 3), torch.randn(1, 2, 3, 3)]
22 changes: 22 additions & 0 deletions circle-mlir/models/unit/Add_F32_R4_C1/__init__.py
@@ -0,0 +1,22 @@
import torch
import numpy as np


# Generate Add operator with Float32, Rank-4 with Constant input
class net_add(torch.nn.Module):
def __init__(self):
super().__init__()
rng = np.random.default_rng(seed=123)
self.C1 = torch.from_numpy(rng.random((1, 2, 3, 3), dtype=np.float32))

def forward(self, inputs):
return torch.add(inputs[0], self.C1)

def onnx_opset_version(self):
# TODO set version
return 10


_model_ = net_add()

_inputs_ = [torch.randn(1, 2, 3, 3)]