Skip to content

Commit 195d0a6

Browse files
Skylion007 authored and pytorchmergebot committed
[BE][Ez]: Use interned hardcoded string FURB156 (pytorch#138330)
Uses interned string constants from the `string` module (e.g. `string.ascii_lowercase`, `string.digits`) instead of hardcoded literals, per ruff rule FURB156. Pull Request resolved: pytorch#138330. Approved by: https://github.com/albanD
1 parent 9c2a803 commit 195d0a6

File tree

4 files changed

+9
-6
lines changed

4 files changed

+9
-6
lines changed

test/test_testing.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
from torch.testing._internal.common_modules import modules, module_db, ModuleInfo
3131
from torch.testing._internal.opinfo.core import SampleInput, DecorateInfo, OpInfo
3232
import operator
33+
import string
3334

3435
# For testing TestCase methods and torch.testing functions
3536
class TestTesting(TestCase):
@@ -2299,7 +2300,7 @@ def test_no_mutate_global_logging_on_import(self, path) -> None:
22992300
# Calling logging.basicConfig, among other things, modifies the global
23002301
# logging state. It is not OK to modify the global logging state on
23012302
# `import torch` (or other submodules we own) because users do not expect it.
2302-
expected = 'abcdefghijklmnopqrstuvwxyz'
2303+
expected = string.ascii_lowercase
23032304
commands = [
23042305
'import logging',
23052306
f'import {path}',

test/torch_np/numpy_tests/lib/test_function_base.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@
3333
IS_WASM = False
3434
IS_PYPY = False
3535

36+
import string
37+
3638
# FIXME: make from torch._numpy
3739
# These are commented, as if they are imported, some of the tests pass for the wrong reasons
3840
# from numpy lib import digitize, piecewise, trapz, select, trim_zeros, interp
@@ -1528,7 +1530,7 @@ def test_execution_order_ticket_1487(self):
15281530
def test_string_ticket_1892(self):
15291531
# Test vectorization over strings: issue 1892.
15301532
f = np.vectorize(lambda x: x)
1531-
s = "0123456789" * 10
1533+
s = string.digits * 10
15321534
assert_equal(s, f(s))
15331535

15341536
def test_cache(self):

torch/distributed/tensor/_ops/_common_rules.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# Copyright (c) Meta Platforms, Inc. and affiliates
2+
import string
23
from typing import cast, Dict, List, Optional, Tuple
34

45
import torch
@@ -234,7 +235,7 @@ def pointwise_rule(op_schema: OpSchema, linearity: bool = False) -> OutputShardi
234235
ij,ij->ij - addition/mul
235236
ij,j->ij - broadcasted addition
236237
"""
237-
alphabet = "abcdefghijklmnopqrstuvwxyz"
238+
alphabet = string.ascii_lowercase
238239
# find the max_dim first in case we need to broadcasting
239240
input_specs = op_schema.args_spec
240241
max_dim = max(input.ndim for input in input_specs)

torchgen/native_function_generation.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from __future__ import annotations
22

3+
import string
34
from collections import defaultdict
45
from typing import Sequence
56

@@ -194,9 +195,7 @@ def generate_out_args_from_schema(
194195
lambda a: [] if a.annotation is None else a.annotation.alias_set,
195196
func.arguments.flat_all,
196197
)
197-
valid_annotations = [
198-
x for x in "abcdefghijklmnopqrstuvwxyz" if x not in used_annotations
199-
]
198+
valid_annotations = [x for x in string.ascii_lowercase if x not in used_annotations]
200199

201200
all_rets_are_tensors = all(r.type == BaseType(BaseTy.Tensor) for r in func.returns)
202201

0 commit comments

Comments
 (0)