From 0366eccd7e08e55bb402fe175c4c2a99bcca5c1c Mon Sep 17 00:00:00 2001
From: Ju Young <rsb98759@gmail.com>
Date: Mon, 7 Oct 2024 05:17:31 +0000
Subject: [PATCH 1/2] [train] Revise MaxPool2D in training

This commit changes the C-style casts to static_cast.

ONE-DCO-1.0-Signed-off-by: JuYoung Lee <rsb98759@gmail.com>
---
 runtime/onert/backend/train/ops/PoolLayer.cc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/runtime/onert/backend/train/ops/PoolLayer.cc b/runtime/onert/backend/train/ops/PoolLayer.cc
index 66a696c2283..cefa87c6863 100644
--- a/runtime/onert/backend/train/ops/PoolLayer.cc
+++ b/runtime/onert/backend/train/ops/PoolLayer.cc
@@ -58,8 +58,10 @@ class MaxPool2D final : public TrainingKernelRegistry
     _op_params.stride_width = strideWidth;
     _op_params.filter_height = kernelHeight;
     _op_params.filter_width = kernelWidth;
-    _op_params.padding_values.height = (int8_t)paddingTop;
-    _op_params.padding_values.width = (int8_t)paddingLeft;
+    assert(paddingTop < (1 << 8));
+    assert(paddingLeft < (1 << 8));
+    _op_params.padding_values.height = static_cast<int8_t>(paddingTop);
+    _op_params.padding_values.width = static_cast<int8_t>(paddingLeft);
     CalculateActivationRange(activation, &_op_params.float_activation_min,
                              &_op_params.float_activation_max);
   }

From 67c203149d22a2cabf852d8cf7616ae65b416417 Mon Sep 17 00:00:00 2001
From: Ju Young <rsb98759@gmail.com>
Date: Wed, 9 Oct 2024 12:35:48 +0000
Subject: [PATCH 2/2] [train] Update casting to int16

This commit updates the padding value type to int16.

ONE-DCO-1.0-Signed-off-by: JuYoung Lee <rsb98759@gmail.com>
---
 runtime/onert/backend/train/ops/PoolLayer.cc | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/runtime/onert/backend/train/ops/PoolLayer.cc b/runtime/onert/backend/train/ops/PoolLayer.cc
index cefa87c6863..098389d8f10 100644
--- a/runtime/onert/backend/train/ops/PoolLayer.cc
+++ b/runtime/onert/backend/train/ops/PoolLayer.cc
@@ -58,10 +58,10 @@ class MaxPool2D final : public TrainingKernelRegistry
     _op_params.stride_width = strideWidth;
     _op_params.filter_height = kernelHeight;
     _op_params.filter_width = kernelWidth;
-    assert(paddingTop < (1 << 8));
-    assert(paddingLeft < (1 << 8));
-    _op_params.padding_values.height = static_cast<int8_t>(paddingTop);
-    _op_params.padding_values.width = static_cast<int8_t>(paddingLeft);
+    assert(paddingTop < (1 << 15));
+    assert(paddingLeft < (1 << 15));
+    _op_params.padding_values.height = static_cast<int16_t>(paddingTop);
+    _op_params.padding_values.width = static_cast<int16_t>(paddingLeft);
     CalculateActivationRange(activation, &_op_params.float_activation_min,
                              &_op_params.float_activation_max);
   }
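Note for reviewers: the sketch below shows the assert-then-narrow pattern from the final patch in isolation, so it can be compiled and inspected outside the backend. `PoolParamsSketch` and `setPadding` are hypothetical names invented for this example; they are not the actual onert/cker types or functions, whose layout may differ.

```cpp
#include <cassert>
#include <cstdint>
#include <iostream>

// Hypothetical stand-in for the pooling parameter struct touched by the patch.
// Padding fields are 16-bit, matching the assumption of the second commit.
struct PoolParamsSketch
{
  struct
  {
    int16_t height;
    int16_t width;
  } padding_values;
};

// Mirrors the patched pattern: assert the value fits in a signed 16-bit field,
// then narrow explicitly with static_cast instead of a C-style cast.
void setPadding(PoolParamsSketch &params, uint32_t paddingTop, uint32_t paddingLeft)
{
  assert(paddingTop < (1u << 15));  // int16_t can hold at most 2^15 - 1
  assert(paddingLeft < (1u << 15));
  params.padding_values.height = static_cast<int16_t>(paddingTop);
  params.padding_values.width = static_cast<int16_t>(paddingLeft);
}

int main()
{
  PoolParamsSketch params{};
  setPadding(params, 3, 3); // typical pooling padding fits comfortably in int16_t
  std::cout << params.padding_values.height << " " << params.padding_values.width << "\n";
  return 0;
}
```

The asserts bound the inputs to the int16_t range, so in debug builds the static_cast cannot silently truncate a larger padding value; in release builds the behavior is unchanged from a plain narrowing cast.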