Skip to content

Commit

Permalink
[pytorch] clean up unused util srcs under tools/autograd (pytorch#50611)
Browse files Browse the repository at this point in the history
Summary:
Pull Request resolved: pytorch#50611

Removed the unused old-style code to prevent it from being used.
Added all autograd/gen_pyi sources to mypy-strict.ini config.

Confirmed byte-for-byte compatible with the old codegen:
```
Run it before and after this PR:
  .jenkins/pytorch/codegen-test.sh <baseline_output_dir>
  .jenkins/pytorch/codegen-test.sh <test_output_dir>

Then run diff to compare the generated files:
  diff -Naur <baseline_output_dir> <test_output_dir>
```

Confirmed clean mypy-strict run:
```
mypy --config mypy-strict.ini
```

Test Plan: Imported from OSS

Reviewed By: ezyang

Differential Revision: D25929730

Pulled By: ljk53

fbshipit-source-id: 1fc94436fd4a6b9b368ee0736e99bfb3c01d38ef
  • Loading branch information
ljk53 authored and facebook-github-bot committed Jan 19, 2021
1 parent b75cdce commit 5252e98
Show file tree
Hide file tree
Showing 7 changed files with 47 additions and 184 deletions.
2 changes: 0 additions & 2 deletions caffe2/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -430,8 +430,6 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
"${TOOLS_PATH}/autograd/gen_variable_factories.py"
"${TOOLS_PATH}/autograd/gen_variable_type.py"
"${TOOLS_PATH}/autograd/load_derivatives.py"
"${TOOLS_PATH}/autograd/nested_dict.py"
"${TOOLS_PATH}/autograd/utils.py"
WORKING_DIRECTORY "${TORCH_ROOT}")


Expand Down
9 changes: 2 additions & 7 deletions mypy-strict.ini
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,8 @@ implicit_reexport = False
strict_equality = True

files = tools/codegen/gen.py,
tools/autograd/gen_annotated_fn_args.py,
tools/autograd/gen_autograd.py,
tools/autograd/gen_python_functions.py,
tools/autograd/gen_trace_type.py,
tools/autograd/gen_variable_factories.py,
tools/autograd/gen_variable_type.py,
tools/autograd/load_derivatives.py,
tools/autograd/*.py,
tools/pyi/*.py,
torch/utils/benchmark/utils/common.py,
torch/utils/benchmark/utils/timer.py,
torch/utils/benchmark/utils/valgrind_wrapper/*.py,
Expand Down
19 changes: 0 additions & 19 deletions tools/autograd/nested_dict.py

This file was deleted.

114 changes: 0 additions & 114 deletions tools/autograd/utils.py

This file was deleted.

2 changes: 1 addition & 1 deletion tools/code_analyzer/op_deps_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import argparse
import yaml

from ..autograd.utils import CodeTemplate
from tools.codegen.code_template import CodeTemplate

BAZEL_OUTPUT = CodeTemplate("""\
TORCH_DEPS = {
Expand Down
52 changes: 25 additions & 27 deletions tools/pyi/gen_pyi.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@

import os
import collections
from pprint import pformat

import argparse

from tools.codegen.model import *
from tools.codegen.api.python import *
from tools.codegen.gen import FileManager
from typing import Sequence, List, Dict

from ..autograd.utils import CodeTemplate, write
from ..autograd.gen_python_functions import should_generate_py_binding, load_signatures, group_overloads

"""
Expand Down Expand Up @@ -166,7 +164,7 @@ def sig_for_ops(opname: str) -> List[str]:
raise Exception("unknown op", opname)

def generate_type_hints(sig_group: PythonSignatureGroup) -> List[str]:
type_hints = []
type_hints: List[str] = []

# Some deprecated ops that are on the blocklist are still included in pyi
if sig_group.signature.name in blocklist and not sig_group.signature.deprecated:
Expand All @@ -193,7 +191,7 @@ def generate_type_hints(sig_group: PythonSignatureGroup) -> List[str]:

return type_hints

def gen_nn_functional(out: str) -> None:
def gen_nn_functional(fm: FileManager) -> None:
# Functions imported into `torch.nn.functional` from `torch`, perhaps being filtered
# through an `_add_docstr` call
imports = [
Expand Down Expand Up @@ -241,28 +239,22 @@ def gen_nn_functional(out: str) -> None:
import_code = ["from .. import {0} as {0}".format(_) for _ in imports]
# TODO make these types more precise
dispatch_code = ["{}: Callable".format(_) for _ in (dispatches + from_c)]
stubs = CodeTemplate.from_file(os.path.join('torch', 'nn', 'functional.pyi.in'))
env = {
fm.write_with_template('torch/nn/functional.pyi', 'torch/nn/functional.pyi.in', lambda: {
'imported_hints': import_code,
'dispatched_hints': dispatch_code
}
write(out, 'torch/nn/functional.pyi', stubs, env)
'dispatched_hints': dispatch_code,
})

# functional.pyi already contains the definitions for those functions
    # so, we don't export them to it
from_c.extend(['hardtanh', 'leaky_relu', 'hardsigmoid'])
dispatch_code = ["{}: Callable".format(_) for _ in (dispatches + from_c)]
env = {
fm.write_with_template('torch/_C/_nn.pyi', 'torch/_C/_nn.pyi.in', lambda: {
'imported_hints': import_code,
'dispatched_hints': dispatch_code
}
stubs = CodeTemplate.from_file(os.path.join('torch', '_C', '_nn.pyi.in'))
write(out, 'torch/_C/_nn.pyi', stubs, env)
'dispatched_hints': dispatch_code,
})

def gen_nn_pyi(out: str) -> None:
gen_nn_functional(out)

def gen_pyi(native_yaml_path: str, deprecated_yaml_path: str, out: str) -> None:
def gen_pyi(native_yaml_path: str, deprecated_yaml_path: str, fm: FileManager) -> None:
"""gen_pyi()
This function generates a pyi file for torch.
Expand Down Expand Up @@ -550,14 +542,19 @@ def gen_pyi(native_yaml_path: str, deprecated_yaml_path: str, out: str) -> None:
'dtype_class_hints': dtype_class_hints,
'all_directive': all_directive
}
TORCH_C_TYPE_STUBS = CodeTemplate.from_file(os.path.join('torch', '_C', '__init__.pyi.in'))
TORCH_C_VARIABLE_FUNCTIONS_TYPE_STUBS = \
CodeTemplate.from_file(os.path.join('torch', '_C', '_VariableFunctions.pyi.in'))

write(out, 'torch/_C/__init__.pyi', TORCH_C_TYPE_STUBS, env)
write(out, 'torch/_C/_VariableFunctions.pyi', TORCH_C_VARIABLE_FUNCTIONS_TYPE_STUBS, env)
write(out, 'torch/_VF.pyi', TORCH_C_VARIABLE_FUNCTIONS_TYPE_STUBS, env)
gen_nn_pyi(out)
fm.write_with_template('torch/_C/__init__.pyi', 'torch/_C/__init__.pyi.in', lambda: {
'generated_comment': '@' + 'generated from torch/_C/__init__.pyi.in',
**env,
})
fm.write_with_template('torch/_C/_VariableFunctions.pyi', 'torch/_C/_VariableFunctions.pyi.in', lambda: {
'generated_comment': '@' + 'generated from torch/_C/_VariableFunctions.pyi.in',
**env,
})
fm.write_with_template('torch/_VF.pyi', 'torch/_C/_VariableFunctions.pyi.in', lambda: {
'generated_comment': '@' + 'generated from torch/_C/_VariableFunctions.pyi.in',
**env,
})
gen_nn_functional(fm)


def main() -> None:
Expand All @@ -573,7 +570,8 @@ def main() -> None:
default='.',
help='path to output directory')
args = parser.parse_args()
gen_pyi(args.native_functions_path, args.deprecated_functions_path, args.out)
fm = FileManager(install_dir=args.out, template_dir='.', dry_run=False)
gen_pyi(args.native_functions_path, args.deprecated_functions_path, fm)


if __name__ == '__main__':
Expand Down
33 changes: 19 additions & 14 deletions tools/setup_helpers/generate_code.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
import argparse
import os
import sys
import yaml

try:
# use faster C loader if available
from yaml import CLoader as YamlLoader
except ImportError:
from yaml import Loader as YamlLoader

source_files = {'.py', '.cpp', '.h'}

Expand Down Expand Up @@ -76,15 +83,16 @@ def generate_code(ninja_global=None,
python_install_dir,
autograd_dir)


def get_selector_from_legacy_operator_selection_list(
selected_op_list_path: str,
):
from tools.autograd.utils import load_op_list_and_strip_overload

selected_op_list = load_op_list_and_strip_overload(
None,
selected_op_list_path,
)
with open(selected_op_list_path, 'r') as f:
# strip out the overload part
# It's only for legacy config - do NOT copy this code!
selected_op_list = {
opname.split('.', 1)[0] for opname in yaml.load(f, Loader=YamlLoader)
}

# Internal build doesn't use this flag any more. Only used by OSS
# build now. Every operator should be considered a root operator
Expand All @@ -96,14 +104,11 @@ def get_selector_from_legacy_operator_selection_list(
is_used_for_training = True

from tools.codegen.selective_build.selector import SelectiveBuilder

selector: SelectiveBuilder = SelectiveBuilder.get_nop_selector()
if selected_op_list is not None:
selector = SelectiveBuilder.from_legacy_op_registration_allow_list(
selected_op_list,
is_root_operator,
is_used_for_training,
)
selector = SelectiveBuilder.from_legacy_op_registration_allow_list(
selected_op_list,
is_root_operator,
is_used_for_training,
)

return selector

Expand Down

0 comments on commit 5252e98

Please sign in to comment.