fix: do not dealloc when use QNN
yirongjie committed Nov 27, 2024
1 parent 6e354d9 commit 7d9e392
Showing 2 changed files with 57 additions and 51 deletions.
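Both files receive the same change: the per-input reference counting and `dealloc()` of activation tensors now runs only when exactly one backend is registered (`Backend::global_backends.size() == 1`). The commit title suggests the intent is to keep activation buffers allocated while the QNN backend is in use. Below is a minimal standalone sketch of that guard pattern; the simplified `Tensor` type, `release_finished_activations`, and `backend_count` are illustrative stand-ins, not code from this repository, and the QNN rationale is inferred from the commit title rather than stated in the diff.

```cpp
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Simplified stand-in for the real Tensor type (hypothetical).
struct Tensor {
    std::string name;
    int sequence = 0;
    bool allocated = true;
    void dealloc() { allocated = false; }
};

// Frees finished activation tensors only when a single backend is active;
// with more than one backend (e.g. CPU + QNN) the buffers are left alone,
// mirroring the `Backend::global_backends.size() == 1` guard in this commit.
void release_finished_activations(const std::vector<Tensor *> &inputs,
                                  std::map<std::string, int> &ref_count,
                                  std::map<std::string, Tensor *> &activations,
                                  size_t backend_count) {
    if (backend_count != 1) return; // extra backend (e.g. QNN) present: skip dealloc
    for (auto *t : inputs) {
        auto it = ref_count.find(t->name);
        if (it == ref_count.end()) continue;            // not a tracked activation
        if (--it->second == 0 && activations[t->name]->sequence > 1) {
            activations[t->name]->dealloc();            // last consumer has run
        }
    }
}

int main() {
    Tensor a{"act0", /*sequence=*/2};
    std::map<std::string, int> refs{{"act0", 1}};
    std::map<std::string, Tensor *> acts{{"act0", &a}};
    std::vector<Tensor *> inputs{&a};

    release_finished_activations(inputs, refs, acts, /*backend_count=*/2); // QNN case
    std::cout << "with 2 backends, still allocated: " << a.allocated << "\n"; // 1

    release_finished_activations(inputs, refs, acts, /*backend_count=*/1); // CPU-only case
    std::cout << "with 1 backend, still allocated:  " << a.allocated << "\n"; // 0
}
```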
36 changes: 19 additions & 17 deletions src/Layer.hpp
@@ -239,23 +239,25 @@ class Layer {
                 break;
             }
         }
-        for (auto input_tensor : input_tensors) {
-            if ((activation_tensors_num.find(input_tensor->name()) != activation_tensors_num.end())) {
-                switch (Tensor::tensor_status) {
-                case TENSOR_STATIC_INIT: {
-                    activation_tensors_num[input_tensor->name()] += 1;
-                    break;
-                }
-                case TENSOR_STATIC_READY: {
-                    activation_tensors_num[input_tensor->name()] -= 1;
-                    break;
-                }
-                default: {
-                }
-                }
-                if (activation_tensors_num[input_tensor->name()] == 0 && activation_tensors[input_tensor->name()]->sequence() > 1) {
-                    activation_tensors[input_tensor->name()]->dealloc();
-                    // std::cout << input_tensor->name() << "|" << std::endl;
+        if (Backend::global_backends.size() == 1) {
+            for (auto input_tensor : input_tensors) {
+                if ((activation_tensors_num.find(input_tensor->name()) != activation_tensors_num.end())) {
+                    switch (Tensor::tensor_status) {
+                    case TENSOR_STATIC_INIT: {
+                        activation_tensors_num[input_tensor->name()] += 1;
+                        break;
+                    }
+                    case TENSOR_STATIC_READY: {
+                        activation_tensors_num[input_tensor->name()] -= 1;
+                        break;
+                    }
+                    default: {
+                    }
+                    }
+                    if (activation_tensors_num[input_tensor->name()] == 0 && activation_tensors[input_tensor->name()]->sequence() > 1) {
+                        activation_tensors[input_tensor->name()]->dealloc();
+                        // std::cout << input_tensor->name() << "|" << std::endl;
+                    }
                 }
             }
         }
72 changes: 38 additions & 34 deletions src/Tensor.cpp
@@ -202,23 +202,25 @@ Tensor &Tensor::getFunc(const std::string &suffix, const TensorFuncType type,
         default: {
         }
         }
-        for (auto input_tensor : tensorPtrs) {
-            if (activation_tensors_num.find(input_tensor->name()) != activation_tensors_num.end()) {
-                switch (Tensor::tensor_status) {
-                case TENSOR_STATIC_INIT: {
-                    activation_tensors_num[input_tensor->name()] += 1;
-                    break;
-                }
-                case TENSOR_STATIC_READY: {
-                    activation_tensors_num[input_tensor->name()] -= 1;
-                    break;
-                }
-                default: {
-                }
-                }
-                if (activation_tensors_num[input_tensor->name()] == 0 && module_tensors[input_tensor->name()]->sequence() > 1) {
-                    module_tensors[input_tensor->name()]->dealloc();
-                    // std::cout << input_tensor->name() << " |F" << std::endl;
+        if (Backend::global_backends.size() == 1) {
+            for (auto input_tensor : tensorPtrs) {
+                if (activation_tensors_num.find(input_tensor->name()) != activation_tensors_num.end()) {
+                    switch (Tensor::tensor_status) {
+                    case TENSOR_STATIC_INIT: {
+                        activation_tensors_num[input_tensor->name()] += 1;
+                        break;
+                    }
+                    case TENSOR_STATIC_READY: {
+                        activation_tensors_num[input_tensor->name()] -= 1;
+                        break;
+                    }
+                    default: {
+                    }
+                    }
+                    if (activation_tensors_num[input_tensor->name()] == 0 && module_tensors[input_tensor->name()]->sequence() > 1) {
+                        module_tensors[input_tensor->name()]->dealloc();
+                        // std::cout << input_tensor->name() << " |F" << std::endl;
+                    }
                 }
             }
         }
@@ -290,23 +292,25 @@ std::vector<std::reference_wrapper<Tensor>> Tensor::getStaticFunc(vector<std::st
         default: {
         }
         }
-        for (auto input_tensor : input_tensors) {
-            if (activation_tensors_num.find(input_tensor->name()) != activation_tensors_num.end()) {
-                switch (Tensor::tensor_status) {
-                case TENSOR_STATIC_INIT: {
-                    activation_tensors_num[input_tensor->name()] += 1;
-                    break;
-                }
-                case TENSOR_STATIC_READY: {
-                    activation_tensors_num[input_tensor->name()] -= 1;
-                    break;
-                }
-                default: {
-                }
-                }
-                if (activation_tensors_num[input_tensor->name()] == 0 && module_tensors[input_tensor->name()]->sequence() > 1) {
-                    module_tensors[input_tensor->name()]->dealloc();
-                    // std::cout << input_tensor->name() << " |S "<< std::endl;// << out_names[0] << std::endl;
+        if (Backend::global_backends.size() == 1) {
+            for (auto input_tensor : input_tensors) {
+                if (activation_tensors_num.find(input_tensor->name()) != activation_tensors_num.end()) {
+                    switch (Tensor::tensor_status) {
+                    case TENSOR_STATIC_INIT: {
+                        activation_tensors_num[input_tensor->name()] += 1;
+                        break;
+                    }
+                    case TENSOR_STATIC_READY: {
+                        activation_tensors_num[input_tensor->name()] -= 1;
+                        break;
+                    }
+                    default: {
+                    }
+                    }
+                    if (activation_tensors_num[input_tensor->name()] == 0 && module_tensors[input_tensor->name()]->sequence() > 1) {
+                        module_tensors[input_tensor->name()]->dealloc();
+                        // std::cout << input_tensor->name() << " |S "<< std::endl;// << out_names[0] << std::endl;
+                    }
                 }
             }
         }
