Skip to content

Commit

Permalink
Update on "Make CI error on inductor fallback when decomp is available"
Browse files Browse the repository at this point in the history
Fixes #99446 

Remove the warning, since it annoyed end-users who had no way to act on it.

Instead, try to hold the line by preventing any decomp from being added without making
the corresponding change to inductor's fallbacks.

Note: we probably still need to document more thoroughly how to update inductor's decomps;
for now the process is essentially "go ask the inductor team for advice".

cc soumith voznesenskym penguinwu anijain2305 EikanWang jgong5 Guobing-Chen XiaobingSuper zhuhaozhe blzheng Xia-Weiwen wenzhe-nrv jiayisunx peterbell10 desertfire

[ghstack-poisoned]
  • Loading branch information
wconstab committed Apr 21, 2023
2 parents f2d9b28 + 87b6843 commit 7a672ed
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 7 deletions.
3 changes: 1 addition & 2 deletions torch/_inductor/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
from .ir import Constant, FixedLayout, InputBuffer, Pointwise, Reduction, TensorBox
from .lowering import (
FALLBACK_ALLOW_LIST,
FALLBACK_DENY_LIST,
fallback_handler,
fallback_node_due_to_unsupported_type,
layout_constraints,
Expand Down Expand Up @@ -365,7 +364,7 @@ def call_function(self, target, args, kwargs):
base_name = target.name().split(".")[0]
if base_name in FALLBACK_ALLOW_LIST:
make_fallback(target)
elif config.implicit_fallbacks and base_name not in FALLBACK_DENY_LIST:
elif config.implicit_fallbacks:
error = (
MissingOperatorWithDecomp
if get_decompositions([target])
Expand Down
4 changes: 3 additions & 1 deletion torch/_inductor/lowering.py
Original file line number Diff line number Diff line change
Expand Up @@ -1322,7 +1322,6 @@ def apply_constraint(arg, fx_arg):
FALLBACK_ALLOW_LIST = {
"torchvision::roi_align",
}
FALLBACK_DENY_LIST = {"aten::exponential"} # fails accuracy on test_torch.py
make_fallback(aten._adaptive_avg_pool2d_backward, require_dense)
make_fallback(aten.convolution_backward, constrain_to_fx_strides)
make_fallback(aten._cudnn_rnn, require_dense)
Expand Down Expand Up @@ -1496,6 +1495,9 @@ def apply_constraint(arg, fx_arg):
make_fallback(aten._linalg_eigh)
make_fallback(aten.zeros.names)

# fails accuracy on test_torch.py, and explicit fallback required to avoid warn=True on implicit
make_fallback(aten.exponential.default, warn=False)


@register_lowering(aten.clone)
def clone(x, *, memory_format=0):
Expand Down
9 changes: 5 additions & 4 deletions torch/testing/_internal/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import inspect
import io
import json
import logging
import math
import operator
import os
Expand Down Expand Up @@ -91,6 +92,7 @@

from .composite_compliance import no_dispatch

log = logging.getLogger(__name__)
torch.backends.disable_global_flags()

FILE_SCHEMA = "file://"
Expand All @@ -113,11 +115,10 @@
slow_tests_dict = {}

def maybe_load_json(filename):
    """Load and return the parsed JSON content of *filename*.

    Returns an empty dict when the path does not refer to a regular file,
    logging a warning instead of printing, so callers always receive a dict.
    """
    # isfile (not exists) so a directory with this name counts as missing.
    if os.path.isfile(filename):
        with open(filename, 'r') as fp:
            return json.load(fp)
    # Lazy %-style args: the message is only formatted if the record is emitted.
    log.warning("Attempted to load json file '%s' but it does not exist.", filename)
    return {}

# set them here in case the tests are running in a subprocess that doesn't call run_tests
Expand Down

0 comments on commit 7a672ed

Please sign in to comment.