Skip to content

Commit

Permalink
Rename tools/codegen to torchgen (pytorch#76275)
Browse files Browse the repository at this point in the history
Summary:
Pull Request resolved: pytorch#76275

In preparation for addressing
pytorch#73212

Diff was generated with:

```
git mv tools/codegen torchgen
git grep -l 'tools.codegen' | xargs sed -i 's/tools.codegen/torchgen/g'
sed -i "s/\${TOOLS_PATH}\/codegen/\${TORCH_ROOT}\/torchgen/g" caffe2/CMakeLists.txt
```

and manual edits to:

* tools/test/test_gen_backend_stubs.py
* torchgen/build.bzl
* torchgen/gen_backend_stubs.py

aka this diff:

```
 diff --git a/tools/test/test_gen_backend_stubs.py b/tools/test/test_gen_backend_stubs.py
index 3dc26c6d2d..104054575e 100644
 --- a/tools/test/test_gen_backend_stubs.py
+++ b/tools/test/test_gen_backend_stubs.py
@@ -9,7 +9,7 @@ from torchgen.gen_backend_stubs import run
 from torchgen.gen import _GLOBAL_PARSE_NATIVE_YAML_CACHE  # noqa: F401

 path = os.path.dirname(os.path.realpath(__file__))
-gen_backend_stubs_path = os.path.join(path, '../torchgen/gen_backend_stubs.py')
+gen_backend_stubs_path = os.path.join(path, '../../torchgen/gen_backend_stubs.py')

 # gen_backend_stubs.py is an integration point that is called directly by external backends.
 # The tests here are to confirm that badly formed inputs result in reasonable error messages.
 diff --git a/torchgen/build.bzl b/torchgen/build.bzl
index ed04e35a43..d00078a3cf 100644
 --- a/torchgen/build.bzl
+++ b/torchgen/build.bzl
@@ -1,6 +1,6 @@
 def define_targets(rules):
     rules.py_library(
-        name = "codegen",
+        name = "torchgen",
         srcs = rules.glob(["**/*.py"]),
         deps = [
             rules.requirement("PyYAML"),
@@ -11,6 +11,6 @@ def define_targets(rules):

     rules.py_binary(
         name = "gen",
-        srcs = [":codegen"],
+        srcs = [":torchgen"],
         visibility = ["//visibility:public"],
     )
 diff --git a/torchgen/gen_backend_stubs.py b/torchgen/gen_backend_stubs.py
index c1a672a655..beee7a15e0 100644
 --- a/torchgen/gen_backend_stubs.py
+++ b/torchgen/gen_backend_stubs.py
@@ -474,7 +474,7 @@ def run(
 ) -> None:

     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    pytorch_root = pathlib.Path(__file__).parent.parent.parent.absolute()
+    pytorch_root = pathlib.Path(__file__).parent.parent.absolute()
     template_dir = os.path.join(pytorch_root, "aten/src/ATen/templates")

     def make_file_manager(install_dir: str) -> FileManager:
```

run_all_fbandroid_tests

Test Plan: sandcastle

Reviewed By: albanD, ngimel

Differential Revision: D35770317

fbshipit-source-id: 153ac4a7fef15b1e750812a90bfafdbc8f1ebcdf
(cherry picked from commit c6d485d)
  • Loading branch information
ezyang authored and pytorchmergebot committed Apr 25, 2022
1 parent 8d31706 commit 36420b5
Show file tree
Hide file tree
Showing 85 changed files with 271 additions and 271 deletions.
2 changes: 1 addition & 1 deletion .circleci/scripts/cpp_doc_push_script.sh
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ sudo apt-get -y install doxygen
# Generate ATen files
pushd "${pt_checkout}"
pip install -r requirements.txt
time python -m tools.codegen.gen \
time python -m torchgen.gen \
-s aten/src/ATen \
-d build/aten/src/ATen

Expand Down
2 changes: 1 addition & 1 deletion .jenkins/pytorch/codegen-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ set -x
rm -rf "$OUT"

# aten codegen
python -m tools.codegen.gen \
python -m torchgen.gen \
-d "$OUT"/torch/share/ATen

# torch codegen
Expand Down
4 changes: 2 additions & 2 deletions BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ generate_aten(
aten_ufunc_generated_cuda_sources("aten/src/ATen/{}") +
["aten/src/ATen/Declarations.yaml"]
),
generator = "//tools/codegen:gen",
generator = "//torchgen:gen",
)

libtorch_cpp_generated_sources = [
Expand Down Expand Up @@ -1345,7 +1345,7 @@ cc_library(
py_binary(
name = "gen_op",
srcs = ["caffe2/contrib/aten/gen_op.py"],
deps = ["//tools/codegen"],
deps = ["//torchgen"],
)

genrule(
Expand Down
2 changes: 1 addition & 1 deletion aten/src/ATen/gen_vulkan_glsl.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import glob
import sys
import os
from tools.codegen.code_template import CodeTemplate
from torchgen.code_template import CodeTemplate

H_NAME = "glsl.h"
CPP_NAME = "glsl.cpp"
Expand Down
2 changes: 1 addition & 1 deletion aten/src/ATen/gen_vulkan_spv.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import os
import sys
import subprocess
from tools.codegen.code_template import CodeTemplate
from torchgen.code_template import CodeTemplate

H_NAME = "spv.h"
CPP_NAME = "spv.cpp"
Expand Down
2 changes: 1 addition & 1 deletion aten/src/ATen/native/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@ If two backends have the same dispatch function, you can write `CPU, CUDA: func`
to reuse the same function name in both cases.

Available backend options can be found by searching `dispatch_keys` in
[codegen](https://github.com/pytorch/pytorch/blob/master/tools/codegen/gen.py).
[codegen](https://github.com/pytorch/pytorch/blob/master/torchgen/gen.py).
There are also two special "generic" backends:

- `CompositeExplicitAutograd` (previously known as `DefaultBackend`):
Expand Down
2 changes: 1 addition & 1 deletion c10/core/DispatchKey.h
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ enum class BackendComponent : uint8_t {

// See Note [DispatchKeySet Internal Representation] for more details.
//
// NOTE: Keep the list in sync with `DispatchKey` in tools/codegen/model.py
// NOTE: Keep the list in sync with `DispatchKey` in torchgen/model.py
enum class DispatchKey : uint16_t {

// ~~~~~~~~~~~~~~~~~~~~~~~~~~ UNDEFINED ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
Expand Down
10 changes: 5 additions & 5 deletions caffe2/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ if(INTERN_BUILD_ATEN_OPS)
set(CMAKE_POSITION_INDEPENDENT_CODE ${__caffe2_CMAKE_POSITION_INDEPENDENT_CODE})

# Generate the headers wrapped by our operator
file(GLOB_RECURSE all_python "${PROJECT_SOURCE_DIR}/tools/codegen/*.py")
file(GLOB_RECURSE all_python "${PROJECT_SOURCE_DIR}/torchgen/*.py")
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/contrib/aten/aten_op.h
COMMAND
"${PYTHON_EXECUTABLE}" ${CMAKE_CURRENT_SOURCE_DIR}/contrib/aten/gen_op.py
Expand Down Expand Up @@ -458,10 +458,10 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
"${TOOLS_PATH}/autograd/gen_variable_type.py"
"${TOOLS_PATH}/autograd/gen_inplace_or_view_type.py"
"${TOOLS_PATH}/autograd/load_derivatives.py"
"${TOOLS_PATH}/codegen/gen_backend_stubs.py"
"${TOOLS_PATH}/codegen/gen_lazy_tensor.py"
"${TOOLS_PATH}/codegen/api/lazy.py"
"${TOOLS_PATH}/codegen/dest/lazy_ir.py"
"${TORCH_ROOT}/torchgen/gen_backend_stubs.py"
"${TORCH_ROOT}/torchgen/gen_lazy_tensor.py"
"${TORCH_ROOT}/torchgen/api/lazy.py"
"${TORCH_ROOT}/torchgen/dest/lazy_ir.py"
WORKING_DIRECTORY "${TORCH_ROOT}")


Expand Down
4 changes: 2 additions & 2 deletions caffe2/contrib/aten/gen_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,9 @@
raise ValueError('aten_root ({}) does not exist'.format(
args.aten_root))
sys.path.insert(0, os.path.join(args.aten_root, '..'))
from tools.codegen.code_template import CodeTemplate as CT
from torchgen.code_template import CodeTemplate as CT
else:
from tools.codegen.code_template import CodeTemplate as CT
from torchgen.code_template import CodeTemplate as CT

OP_TEMPLATE = CT.from_file(
os.path.join(args.template_dir, 'aten_op_template.h'))
Expand Down
4 changes: 2 additions & 2 deletions cmake/Codegen.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ if(INTERN_BUILD_ATEN_OPS)
set_source_files_properties(${CMAKE_CURRENT_LIST_DIR}/../aten/src/ATen/MapAllocator.cpp PROPERTIES COMPILE_FLAGS "-fno-openmp")
endif()

file(GLOB_RECURSE all_python "${CMAKE_CURRENT_LIST_DIR}/../tools/codegen/*.py")
file(GLOB_RECURSE all_python "${CMAKE_CURRENT_LIST_DIR}/../torchgen/*.py")

set(GEN_ROCM_FLAG)
if(USE_ROCM)
Expand Down Expand Up @@ -148,7 +148,7 @@ if(INTERN_BUILD_ATEN_OPS)
endif()

set(GEN_COMMAND
"${PYTHON_EXECUTABLE}" -m tools.codegen.gen
"${PYTHON_EXECUTABLE}" -m torchgen.gen
--source-path ${CMAKE_CURRENT_LIST_DIR}/../aten/src/ATen
--install_dir ${CMAKE_BINARY_DIR}/aten/src/ATen
${GEN_PER_OPERATOR_FLAG}
Expand Down
2 changes: 1 addition & 1 deletion docs/cpp/source/check-doxygen.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ pushd "$(dirname "$0")/../../.."

cp torch/_utils_internal.py tools/shared

python -m tools.codegen.gen
python -m torchgen.gen

python tools/setup_helpers/generate_code.py \
--native-functions-path aten/src/ATen/native/native_functions.yaml
Expand Down
2 changes: 1 addition & 1 deletion test/jit/fixtures_srcs/generate_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def div_Tensor_0_3(self: Tensor, other: Tensor) -> Tensor:
fbcode/caffe2/torch/csrc/jit/mobile/upgrader_mobile.cpp
```
python pytorch/tools/codegen/operator_versions/gen_mobile_upgraders.py
python pytorch/torchgen/operator_versions/gen_mobile_upgraders.py
```
4. Generate the test to cover upgrader.
Expand Down
2 changes: 1 addition & 1 deletion test/mobile/test_upgrader_codegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from torch.testing._internal.common_utils import TestCase, run_tests

from tools.codegen.operator_versions.gen_mobile_upgraders import (
from torchgen.operator_versions.gen_mobile_upgraders import (
sort_upgrader,
write_cpp,
)
Expand Down
2 changes: 1 addition & 1 deletion tools/autograd/build.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,6 @@ def define_targets(rules):
visibility = ["//:__subpackages__"],
deps = [
rules.requirement("PyYAML"),
"//tools/codegen",
"//torchgen:torchgen",
],
)
6 changes: 3 additions & 3 deletions tools/autograd/context.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from tools.codegen.api.autograd import NativeFunctionWithDifferentiabilityInfo as NFWDI
from tools.codegen.context import native_function_manager
from tools.codegen.utils import T
from torchgen.api.autograd import NativeFunctionWithDifferentiabilityInfo as NFWDI
from torchgen.context import native_function_manager
from torchgen.utils import T

import functools
from typing import Callable
Expand Down
10 changes: 5 additions & 5 deletions tools/autograd/gen_annotated_fn_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@

from typing import Dict, List, Any

from tools.codegen.gen import parse_native_yaml
from tools.codegen.utils import FileManager
from tools.codegen.context import with_native_function
from tools.codegen.model import BaseOperatorName, NativeFunction
import tools.codegen.api.python as python
from torchgen.gen import parse_native_yaml
from torchgen.utils import FileManager
from torchgen.context import with_native_function
from torchgen.model import BaseOperatorName, NativeFunction
import torchgen.api.python as python
from .gen_python_functions import (
should_generate_py_binding,
is_py_torch_function,
Expand Down
8 changes: 4 additions & 4 deletions tools/autograd/gen_autograd.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,13 +24,13 @@

import argparse
import os
from tools.codegen.api import cpp
from tools.codegen.api.autograd import (
from torchgen.api import cpp
from torchgen.api.autograd import (
match_differentiability_info,
NativeFunctionWithDifferentiabilityInfo,
)
from tools.codegen.gen import parse_native_yaml
from tools.codegen.selective_build.selector import SelectiveBuilder
from torchgen.gen import parse_native_yaml
from torchgen.selective_build.selector import SelectiveBuilder
from typing import List
from . import gen_python_functions
from .gen_autograd_functions import (
Expand Down
10 changes: 5 additions & 5 deletions tools/autograd/gen_autograd_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,14 @@

from typing import List, Sequence, Tuple

from tools.codegen.api.autograd import (
from torchgen.api.autograd import (
Derivative,
DifferentiabilityInfo,
SavedAttribute,
uses_retain_variables,
uses_single_grad,
)
from tools.codegen.api.types import (
from torchgen.api.types import (
Binding,
BaseCType,
OptionalCType,
Expand All @@ -32,9 +32,9 @@
ArrayRefCType,
optionalIntArrayRefT,
)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.utils import FileManager
from tools.codegen.model import Argument
from torchgen.code_template import CodeTemplate
from torchgen.utils import FileManager
from torchgen.model import Argument

FUNCTION_DECLARATION = CodeTemplate(
"""\
Expand Down
14 changes: 7 additions & 7 deletions tools/autograd/gen_inplace_or_view_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
# if updates are needed in torch/csrc/autograd/autograd_not_implemented_fallback.cpp
# The fallback is expected to mimick this codegen, so we should keep the two in sync.

from tools.codegen.api import cpp
from tools.codegen.api.autograd import (
from torchgen.api import cpp
from torchgen.api.autograd import (
NativeFunctionWithDifferentiabilityInfo,
gen_differentiable_outputs,
dispatch_strategy,
)
from tools.codegen.api.types import (
from torchgen.api.types import (
Binding,
DispatcherSignature,
CType,
Expand All @@ -21,9 +21,9 @@
intArrayRefT,
symIntArrayRefT,
)
from tools.codegen.code_template import CodeTemplate
from tools.codegen.context import with_native_function
from tools.codegen.model import (
from torchgen.code_template import CodeTemplate
from torchgen.context import with_native_function
from torchgen.model import (
Type,
NativeFunction,
SelfArgument,
Expand All @@ -32,7 +32,7 @@
is_foreach_op,
)
from typing import List, Optional, Sequence, Tuple, Dict
from tools.codegen.utils import FileManager
from torchgen.utils import FileManager
from .context import with_native_function_with_differentiability_info
from .gen_trace_type import (
MANUAL_AUTOGRAD,
Expand Down
16 changes: 8 additions & 8 deletions tools/autograd/gen_python_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,10 @@

from .gen_trace_type import should_trace

from tools.codegen.code_template import CodeTemplate
from tools.codegen.api import cpp
from tools.codegen.api.types import CppSignatureGroup
from tools.codegen.api.python import (
from torchgen.code_template import CodeTemplate
from torchgen.api import cpp
from torchgen.api.types import CppSignatureGroup
from torchgen.api.python import (
PythonArgument,
PythonSignature,
PythonSignatureDeprecated,
Expand All @@ -57,16 +57,16 @@
namedtuple_fieldnames,
signature,
)
from tools.codegen.gen import cpp_string, parse_native_yaml
from tools.codegen.context import with_native_function
from tools.codegen.model import (
from torchgen.gen import cpp_string, parse_native_yaml
from torchgen.context import with_native_function
from torchgen.model import (
Argument,
BaseOperatorName,
NativeFunction,
Type,
Variant,
)
from tools.codegen.utils import split_name_params, YamlLoader, FileManager
from torchgen.utils import split_name_params, YamlLoader, FileManager

from typing import Dict, Optional, List, Tuple, Set, Sequence, Callable

Expand Down
12 changes: 6 additions & 6 deletions tools/autograd/gen_trace_type.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
import itertools
from typing import List, Sequence, Union, Dict

from tools.codegen.api.types import DispatcherSignature
from tools.codegen.api import cpp
from tools.codegen.code_template import CodeTemplate
from tools.codegen.context import with_native_function
from tools.codegen.utils import FileManager
from tools.codegen.model import (
from torchgen.api.types import DispatcherSignature
from torchgen.api import cpp
from torchgen.code_template import CodeTemplate
from torchgen.context import with_native_function
from torchgen.utils import FileManager
from torchgen.model import (
Argument,
NativeFunction,
SchemaKind,
Expand Down
14 changes: 7 additions & 7 deletions tools/autograd/gen_variable_factories.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@
import re
from typing import Optional, List

from tools.codegen.api.types import CppSignatureGroup
from tools.codegen.api import cpp
import tools.codegen.api.python as python
from tools.codegen.gen import parse_native_yaml
from tools.codegen.context import with_native_function
from tools.codegen.utils import mapMaybe, FileManager
from tools.codegen.model import NativeFunction, TensorOptionsArguments, Variant
from torchgen.api.types import CppSignatureGroup
from torchgen.api import cpp
import torchgen.api.python as python
from torchgen.gen import parse_native_yaml
from torchgen.context import with_native_function
from torchgen.utils import mapMaybe, FileManager
from torchgen.model import NativeFunction, TensorOptionsArguments, Variant

OPTIONAL_TYPE_PATTERN = re.compile(r"c10::optional<(.+)>")
TYPE_PATTERN = re.compile(r"(?:const\s+)?([A-Z]\w+)")
Expand Down
14 changes: 7 additions & 7 deletions tools/autograd/gen_variable_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
AUTOGRAD_NOT_IMPLEMENTED_REGISTRATION,
)

from tools.codegen.api.types import (
from torchgen.api.types import (
Binding,
DispatcherSignature,
BaseCType,
Expand All @@ -68,19 +68,19 @@
TupleCType,
VectorCType,
)
from tools.codegen.api.autograd import (
from torchgen.api.autograd import (
DifferentiableInput,
NativeFunctionWithDifferentiabilityInfo,
SavedAttribute,
dispatch_strategy,
gen_differentiable_outputs,
is_differentiable,
)
from tools.codegen.api import cpp
from tools.codegen.code_template import CodeTemplate
from tools.codegen.context import native_function_manager, with_native_function
from tools.codegen.utils import mapMaybe, FileManager
from tools.codegen.model import (
from torchgen.api import cpp
from torchgen.code_template import CodeTemplate
from torchgen.context import native_function_manager, with_native_function
from torchgen.utils import mapMaybe, FileManager
from torchgen.model import (
Argument,
NativeFunction,
SchemaKind,
Expand Down
Loading

0 comments on commit 36420b5

Please sign in to comment.