pytorch/caffe2/core/net_async_task.cc
Nikita Shulga 4cb534f92e Make PyTorch code-base clang-tidy compliant (#56892)
Summary:
This is an automatic change generated by the following script:
```
#!/usr/bin/env python3
from subprocess import check_output, check_call
import os

def get_compiled_files_list():
    import json
    with open("build/compile_commands.json") as f:
        data = json.load(f)
    files = [os.path.relpath(node['file']) for node in data]
    for idx, fname in enumerate(files):
        if fname.startswith('build/') and fname.endswith('.DEFAULT.cpp'):
            files[idx] = fname[len('build/'):-len('.DEFAULT.cpp')]
    return files

def run_clang_tidy(fname):
    check_call(["python3", "tools/clang_tidy.py", "-c", "build", "-x", fname,"-s"])
    changes = check_output(["git", "ls-files", "-m"])
    if len(changes) == 0:
        return
    check_call(["git", "commit","--all", "-m", f"NOLINT stubs for {fname}"])

def main():
    git_files = check_output(["git", "ls-files"]).decode("ascii").split("\n")
    compiled_files = get_compiled_files_list()
    for idx, fname in enumerate(git_files):
        if fname not in compiled_files:
            continue
        if fname.startswith("caffe2/contrib/aten/"):
            continue
        print(f"[{idx}/{len(git_files)}] Processing {fname}")
        run_clang_tidy(fname)

if __name__ == "__main__":
    main()
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/56892

Reviewed By: H-Huang

Differential Revision: D27991944

Pulled By: malfet

fbshipit-source-id: 5415e1eb2c1b34319a4f03024bfaa087007d7179
2021-04-28 14:10:25 -07:00

109 lines
2.8 KiB
C++

#include "caffe2/core/net_async_task.h"
#include "caffe2/core/net_async_task_graph.h"
namespace caffe2 {
// Builds a task from a non-empty operator chain. All operators must live on
// the same device; the shared device option is cached for later queries.
// NOLINTNEXTLINE(modernize-pass-by-value)
AsyncTask::AsyncTask(const std::vector<OperatorBase*>& ops) : ops_(ops) {
  // An empty chain is a construction error.
  CAFFE_ENFORCE(!ops_.empty());
  device_option_ = ops_.front()->device_option();
  // Every op in the chain has to agree with the first op's device.
  for (const auto& chain_op : ops_) {
    CAFFE_ENFORCE(IsSameDevice(device_option_, chain_op->device_option()));
  }
  Reset();
}
// Records a chain failure: logs the composed error message, marks the
// chain's terminal event as finished (with an exception payload when
// save_exception is set), and completes the task's future with the error.
void AsyncTask::handleChainError(
    OperatorBase* op,
    const char* err_str,
    bool save_exception) {
  std::string err_msg(err_str);
  if (op != nullptr) {
    // Append the failing op's type when its debug def is available.
    err_msg += ", op " + (op->has_debug_def() ? op->type() : " unknown");
  }
  LOG(ERROR) << err_msg;
  // save error message and exception in chain's Event
  OperatorBase* terminal_op = ops_.back();
  if (save_exception) {
    terminal_op->event().SetFinishedWithException(err_msg.c_str());
  } else {
    terminal_op->event().SetFinished(err_msg.c_str());
  }
  // set future as completed with an error
  // TODO: exceptions in future
  future_.SetCompleted(err_msg.c_str());
}
// Runs the operator chain in order via RunAsync and completes future_.
// Returns true when every op was dispatched successfully; returns false on
// any failure, after the error has been recorded through handleChainError.
bool AsyncTask::Run(const ExecutionOptions& options) {
  // TODO: insert CUDA's async stream waits; tracing and counters
  // `op` tracks the most recently dispatched operator so that the catch
  // blocks can attribute an exception to it.
  OperatorBase* op = nullptr;
  try {
    // NOLINTNEXTLINE(modernize-loop-convert)
    for (auto op_idx = 0U; op_idx < ops_.size(); ++op_idx) {
      op = ops_[op_idx];
      int stream_id = 0; // TODO: thread local stream id
      if (!op->RunAsync(stream_id)) {
        // Synchronous dispatch failure; no exception was saved.
        handleChainError(op, "Failed to execute an op");
        return false;
      }
    }
    if (options.finish_chain_) {
      // Block on the last op so the whole chain is done before we proceed.
      op = ops_.back();
      op->Finish();
    }
    // set the future as successfully completed or, in case of async CPU,
    // use op's callback
    if (IsCPUDeviceType(device_option_.device_type()) &&
        ops_.back()->HasAsyncPart()) {
      // NOTE(review): the callback captures `this` and a reference to the
      // event; this assumes the task and the last op outlive the event's
      // completion — confirm against the scheduler's lifetime guarantees.
      auto& event = ops_.back()->event();
      event.SetCallback([this, &event]() {
        CAFFE_ENFORCE(event.IsFinished());
        if (event.Query() == EventStatus::EVENT_SUCCESS) {
          future_.SetCompleted();
        } else {
          // TODO: support for exceptions
          future_.SetCompleted(event.ErrorMessage().c_str());
        }
      });
    } else {
      future_.SetCompleted();
    }
  } catch (const std::exception& e) {
    handleChainError(op, e.what(), /* save_exception */ true);
    return false;
  } catch (...) {
    handleChainError(
        op,
        "Failed to execute task: unknown error",
        /* save_exception */ true);
    return false;
  }
  return true;
}
void AsyncTask::Reset() {
for (auto& op : ops_) {
op->ResetEvent();
}
future_.ResetState();
}
// Returns (by value) the device option shared by all ops in this chain,
// as cached by the constructor.
DeviceOption AsyncTask::GetDeviceOption() const {
  return device_option_;
}
// Mutable accessor for the future that signals this task's completion.
AsyncTaskFuture& AsyncTask::GetFuture() {
  return future_;
}
// Const accessor for the future that signals this task's completion.
const AsyncTaskFuture& AsyncTask::GetFuture() const {
  return future_;
}
}; // namespace caffe2