[onert] Introduce backpropActivation to OperationUtils
This PR adds a backpropActivation function to the operation utilities.
The function calls the proper cker kernel according to the given ir::Activation.

ONE-DCO-1.0-Signed-off-by: SeungHui Youn <[email protected]>
zetwhite committed Jan 17, 2024
1 parent e74df47 commit f77ea01
Showing 2 changed files with 71 additions and 2 deletions.
4 changes: 2 additions & 2 deletions runtime/onert/backend/train/ops/ElementwiseActivationLayer.cc
@@ -78,8 +78,8 @@ void ElementwiseActivationLayer::configure(const IPortableTensor *input, IPortab
 }
 else
 {
-  throw std::runtime_error("train ElementwiseActivationLayer : This layer does not "
-                           "suppport other ReLU except for ReLU(0-inf) and ReLU6(0-6)");
+  throw std::runtime_error(
+    "train ElementwiseActivationLayer : Unsupported ReLU activation type");
 }
 }
 else
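The second file below adds the helper that dispatches to cker's gradient kernels. As a reading aid, here is a minimal sketch of the element-wise gradients those kernels compute, assuming float tensors; this is an illustration, not cker's actual implementation:

// Hedged sketch of the per-element math behind the cker kernels used below.
// y is the forward output, dy the incoming (upstream) gradient.
float relu_grad(float y, float dy)
{
  // ReLU(0-inf): gradient passes through only where the output was positive.
  return y > 0.0f ? dy : 0.0f;
}
float relu6_grad(float y, float dy)
{
  // ReLU6(0-6): gradient passes through only inside the non-saturated range.
  return (y > 0.0f && y < 6.0f) ? dy : 0.0f;
}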
69 changes: 69 additions & 0 deletions runtime/onert/backend/train/ops/OperationUtils.cc
@@ -0,0 +1,69 @@
/*
* Copyright (c) 2024 Samsung Electronics Co., Ltd. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "OperationUtils.h"

#include <cker/train/operation/ReLU.h>
#include <cker/train/operation/ReLU6.h>

namespace onert
{
namespace backend
{
namespace train
{
namespace ops
{

const IPortableTensor *backpropActivation(const ir::Activation &activation,
const IPortableTensor *output,
const IPortableTensor *input_backprop,
IPortableTensor *output_backprop)
{
  // output_backprop is only dereferenced when there is an activation to differentiate
  if (activation != ir::Activation::NONE)
    assert(output_backprop != nullptr);

  const IPortableTensor *res;
  switch (activation)
  {
    case ir::Activation::NONE:
      res = input_backprop;
      break;
    case ir::Activation::RELU:
      nnfw::cker::train::ReLUGrad(getShape(output), getBuffer<float>(output),
                                  getShape(input_backprop), getBuffer<float>(input_backprop),
                                  getShape(output_backprop), getBuffer<float>(output_backprop));
      res = output_backprop;
      break;
    case ir::Activation::RELU6:
      nnfw::cker::train::ReLU6Grad(getShape(output), getBuffer<float>(output),
                                   getShape(input_backprop), getBuffer<float>(input_backprop),
                                   getShape(output_backprop), getBuffer<float>(output_backprop));
      res = output_backprop;
      break;
    default:
      throw std::runtime_error("Activation type not yet supported");
  }

  return res;
}

} // namespace ops
} // namespace train
} // namespace backend
} // namespace onert
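For context, a minimal sketch of how a train op's backward pass might use this helper. The layer name and the _activation, _output, _back_prop_output, and _act_back_prop members are hypothetical illustrations; only backpropActivation itself comes from this commit:

// Hedged example: dispatching the fused activation's gradient before
// computing the layer's own input gradient.
void FullyConnectedLayer::backward()
{
  // For NONE the incoming gradient is passed through unchanged; for
  // RELU/RELU6 the proper cker kernel writes into _act_back_prop.
  const IPortableTensor *backprop_act =
    backpropActivation(_activation, _output, _back_prop_output, _act_back_prop.get());

  // ... use backprop_act as the upstream gradient for the rest of
  // the layer's backward computation ...
}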