Skip to content

Commit

Permalink
[ NEURALNET ] change the loss scale property to Rigid Property
Browse files Browse the repository at this point in the history
Loss Scale is more like a rigid property of the model, rather than a flexible
property.

Resolves:

**Self evaluation:**
1. Build test:	 [X]Passed [ ]Failed [ ]Skipped
2. Run test:	 [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <[email protected]>
  • Loading branch information
jijoongmoon committed May 2, 2024
1 parent 47ef55f commit cb0f83f
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 14 deletions.
3 changes: 1 addition & 2 deletions nntrainer/layers/layer_context.h
Original file line number Diff line number Diff line change
Expand Up @@ -172,8 +172,7 @@ class InitLayerContext {
/**
* @brief Request a new weight for the layer
*
* @param dim_v dimension of Variagble of the weight
* @param dim_g dimension of Gradient of the weight
* @param dim dimension of Variable of the weight
* @param init initializer for the weight
* @param reg regularizer for the weight
* @param reg_const regularization constant for the weight
Expand Down
13 changes: 7 additions & 6 deletions nntrainer/models/neuralnet.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -65,12 +65,13 @@
namespace nntrainer {

NeuralNetwork::NeuralNetwork() :
model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm()),
model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm(),
props::LossScale()),
model_flex_props(
props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead(),
props::TensorFormat(), props::ModelTensorDataType(), props::LossScale()),
props::TensorFormat(), props::ModelTensorDataType()),
load_path(std::string()),
epoch_idx(0),
iter(0),
Expand All @@ -83,12 +84,13 @@ NeuralNetwork::NeuralNetwork() :
}

NeuralNetwork::NeuralNetwork(AppContext app_context_) :
model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm()),
model_props(props::LossType(), {}, {}, props::ClipGradByGlobalNorm(),
props::LossScale()),
model_flex_props(
props::Epochs(), props::TrainingBatchSize(), props::SavePath(),
props::ContinueTrain(), props::SaveBestPath(), props::MemoryOptimization(),
props::MemorySwap(), props::MemorySwapPath(), props::MemorySwapLookahead(),
props::TensorFormat(), props::ModelTensorDataType(), props::LossScale()),
props::TensorFormat(), props::ModelTensorDataType()),
load_path(std::string()),
epoch_idx(0),
iter(0),
Expand Down Expand Up @@ -179,9 +181,8 @@ int NeuralNetwork::compile() {
const std::string tensor_type =
to_string(std::get<props::ModelTensorDataType>(model_flex_props));

const float loss_scale = std::get<props::LossScale>(model_flex_props);
model_graph = NetworkGraph(memory_swap, memory_swap_path, lookahead,
tensor_format, tensor_type, loss_scale);
tensor_format, tensor_type);

model_graph.setMemoryOptimizations(
std::get<props::MemoryOptimization>(model_flex_props));
Expand Down
14 changes: 8 additions & 6 deletions nntrainer/models/neuralnet.h
Original file line number Diff line number Diff line change
Expand Up @@ -624,14 +624,16 @@ s * @retval shared_ptr<const Tensor>
const std::string file_path) override;

private:
using FlexiblePropTypes = std::tuple<
props::Epochs, props::TrainingBatchSize, props::SavePath,
props::ContinueTrain, props::SaveBestPath, props::MemoryOptimization,
props::MemorySwap, props::MemorySwapPath, props::MemorySwapLookahead,
props::TensorFormat, props::ModelTensorDataType, props::LossScale>;
using FlexiblePropTypes =
std::tuple<props::Epochs, props::TrainingBatchSize, props::SavePath,
props::ContinueTrain, props::SaveBestPath,
props::MemoryOptimization, props::MemorySwap,
props::MemorySwapPath, props::MemorySwapLookahead,
props::TensorFormat, props::ModelTensorDataType>;
using RigidPropTypes =
std::tuple<props::LossType, std::vector<props::InputConnection>,
std::vector<props::LabelLayer>, props::ClipGradByGlobalNorm>;
std::vector<props::LabelLayer>, props::ClipGradByGlobalNorm,
props::LossScale>;

RigidPropTypes model_props; /**< model props */
FlexiblePropTypes model_flex_props; /**< model train props */
Expand Down

0 comments on commit cb0f83f

Please sign in to comment.