From d95e1afad37ab91a7c8d3fed19d86f2b7f2d218f Mon Sep 17 00:00:00 2001
From: Jiakai Liu
Date: Thu, 29 Oct 2020 22:51:49 -0700
Subject: [PATCH] [pytorch] add script to run all codegen (#46243)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/46243

Add util script to test whether any codegen output changes.

Test Plan: Imported from OSS

Reviewed By: ezyang

Differential Revision: D24388873

Pulled By: ljk53

fbshipit-source-id: ef9ef7fe6067df1e0c53aba725fc13b0dfd7f4c2
---
 .jenkins/pytorch/codegen-test.sh        | 60 +++++++++++++++++++++++++
 tools/autograd/gen_annotated_fn_args.py |  1 +
 tools/autograd/gen_autograd.py          |  3 +-
 tools/jit/gen_unboxing_wrappers.py      |  3 +-
 4 files changed, 65 insertions(+), 2 deletions(-)
 create mode 100755 .jenkins/pytorch/codegen-test.sh

diff --git a/.jenkins/pytorch/codegen-test.sh b/.jenkins/pytorch/codegen-test.sh
new file mode 100755
index 00000000000..3b75999ceb2
--- /dev/null
+++ b/.jenkins/pytorch/codegen-test.sh
@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+
+# This script can also be used to test whether your diff changes any codegen output.
+#
+# Run it before and after your change:
+#   .jenkins/pytorch/codegen-test.sh <baseline_output_dir>
+#   .jenkins/pytorch/codegen-test.sh <test_output_dir>
+#
+# Then run diff to compare the generated files:
+#   diff -Naur <baseline_output_dir> <test_output_dir>
+
+set -eu -o pipefail
+
+if [ "$#" -eq 0 ]; then
+  COMPACT_JOB_NAME="${BUILD_ENVIRONMENT}"
+  source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
+  OUT="$(dirname "${BASH_SOURCE[0]}")/../../codegen_result"
+else
+  OUT=$1
+fi
+
+set -x
+
+rm -rf "$OUT"
+
+# aten codegen
+python -m tools.codegen.gen \
+  -d "$OUT"/torch/share/ATen
+
+# torch codegen
+python -m tools.setup_helpers.generate_code \
+  --declarations-path "$OUT"/torch/share/ATen/Declarations.yaml \
+  --install_dir "$OUT"
+
+# pyi codegen
+mkdir -p "$OUT"/pyi/torch/_C
+mkdir -p "$OUT"/pyi/torch/nn
+python -m tools.pyi.gen_pyi \
+  --declarations-path "$OUT"/torch/share/ATen/Declarations.yaml \
+  --out "$OUT"/pyi
+
+# autograd codegen (called by torch codegen but can run independently)
+python -m tools.autograd.gen_autograd \
+  "$OUT"/torch/share/ATen/Declarations.yaml \
+  "$OUT"/autograd \
+  tools/autograd
+
+# unboxing_wrappers codegen (called by torch codegen but can run independently)
+mkdir -p "$OUT"/unboxing_wrappers
+python -m tools.jit.gen_unboxing_wrappers \
+  "$OUT"/torch/share/ATen/Declarations.yaml \
+  "$OUT"/unboxing_wrappers \
+  tools/jit/templates
+
+# annotated_fn_args codegen (called by torch codegen but can run independently)
+mkdir -p "$OUT"/annotated_fn_args
+python -m tools.autograd.gen_annotated_fn_args \
+  "$OUT"/torch/share/ATen/Declarations.yaml \
+  "$OUT"/annotated_fn_args \
+  tools/autograd
diff --git a/tools/autograd/gen_annotated_fn_args.py b/tools/autograd/gen_annotated_fn_args.py
index 7b4b0ece8da..661694f3d6b 100644
--- a/tools/autograd/gen_annotated_fn_args.py
+++ b/tools/autograd/gen_annotated_fn_args.py
@@ -20,6 +20,7 @@ from .gen_python_functions import (
     get_py_variable_methods,
     op_name,
 )
+import argparse
 import textwrap
 
 from .gen_autograd import load_aten_declarations
diff --git a/tools/autograd/gen_autograd.py b/tools/autograd/gen_autograd.py
index da937a4377f..2783eb644bc 100644
--- a/tools/autograd/gen_autograd.py
+++ b/tools/autograd/gen_autograd.py
@@ -302,7 +302,8 @@ def main():
     parser.add_argument('autograd', metavar='AUTOGRAD',
                         help='path to autograd directory')
     args = parser.parse_args()
-    gen_autograd(args.declarations, args.out, args.autograd)
+    gen_autograd(args.declarations, args.out, args.autograd,
+                 SelectiveBuilder.get_nop_selector())
 
 
 if __name__ == '__main__':
diff --git a/tools/jit/gen_unboxing_wrappers.py b/tools/jit/gen_unboxing_wrappers.py
index 8d1fb00fc8d..f2896fac7f2 100644
--- a/tools/jit/gen_unboxing_wrappers.py
+++ b/tools/jit/gen_unboxing_wrappers.py
@@ -535,7 +535,8 @@ def main():
     parser.add_argument('template_path', metavar='TEMPLATE_PATH',
                         help='path to templates directory')
     args = parser.parse_args()
-    gen_unboxing_wrappers(args.declarations, args.out, args.template_path)
+    gen_unboxing_wrappers(args.declarations, args.out, args.template_path,
+                          SelectiveBuilder.get_nop_selector())
 
 
 if __name__ == '__main__':
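
As a usage sketch grounded in the script's header comment (the /tmp paths below are
illustrative, not part of the patch): generate the codegen output once from a clean
checkout, once after your change, and diff the two trees.

    # baseline: run all codegen from the unmodified checkout
    .jenkins/pytorch/codegen-test.sh /tmp/codegen_baseline

    # ... apply your local change to the codegen scripts or templates ...

    # regenerate into a second directory
    .jenkins/pytorch/codegen-test.sh /tmp/codegen_new

    # an empty diff means the change does not affect any generated file
    diff -Naur /tmp/codegen_baseline /tmp/codegen_new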