pytorch/caffe2/opt/dead_code_elim.cc
Nikita Shulga a9b0a921d5 Disable avoid-non-const-global-variables lint check (#62008)
Summary:
As GoogleTest `TEST` macro is non-compliant with it as well as `DEFINE_DISPATCH`

All changes but the ones to `.clang-tidy` are generated using the following script:
```
for i in `find . -type f -iname "*.c*" -or -iname "*.h"|xargs grep cppcoreguidelines-avoid-non-const-global-variables|cut -f1 -d:|sort|uniq`;  do sed -i "/\/\/ NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)/d" $i; done
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/62008

Reviewed By: driazati, r-barnes

Differential Revision: D29838584

Pulled By: malfet

fbshipit-source-id: 1b2f8602c945bd4ce50a9bfdd204755556e31d13
2021-07-22 18:04:40 -07:00

43 lines
1 KiB
C++

#include "caffe2/core/logging.h"
#include "caffe2/opt/converter.h"
#include "caffe2/opt/passes.h"
namespace caffe2 {
namespace opt {
using namespace nom;
using namespace nom::repr;
// Dead-code elimination pass: repeatedly sweeps the dataflow graph and
// removes any operator node none of whose outputs is live, i.e. neither
// consumed by another node nor listed among the module's external outputs.
// After each deletion the sweep restarts from scratch, since removing a
// node may render its producers dead in turn (and avoids walking a node
// list that was just mutated). Iterates until a full sweep deletes nothing.
void deadCodeElim(NNModule* nn) {
  bool mutated = true;
  while (mutated) {
    mutated = false;
    for (const auto& opNode : nn->dataFlow.getMutableNodes()) {
      // Only operator nodes are candidates; tensors are removed alongside
      // their producing operator below.
      NOM_REQUIRE_OR_CONT(nn::is<repr::NeuralNetOperator>(opNode));
      const auto producedOutputs = nn::getOutputs(opNode);
      bool anyOutputLive = false;
      for (const auto& out : producedOutputs) {
        if (nn::hasConsumer(out) || nn->outputs.count(out)) {
          anyOutputLive = true;
          break;
        }
      }
      NOM_REQUIRE_OR_CONT(!anyOutputLive);
      // Every output is dead: drop the outputs, then the operator itself,
      // and restart the sweep over a fresh node list.
      for (const auto& out : producedOutputs) {
        nn->dataFlow.deleteNode(out);
      }
      nn->dataFlow.deleteNode(opNode);
      mutated = true;
      break;
    }
  }
}
// Register the pass under the name "DeadCodeElim" so the optimizer
// framework (caffe2/opt/passes.h) can invoke deadCodeElim by name.
REGISTER_OPT_PASS_FROM_FUNC(DeadCodeElim, deadCodeElim);
} // namespace opt
} // namespace caffe2