From 2fd773574f12036b3920a06652a236c94c44a3a0 Mon Sep 17 00:00:00 2001
From: Jianyu Huang
Date: Tue, 30 Sep 2025 23:51:46 -0700
Subject: [PATCH] Resolve list not recognizable issue
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Summary:
X-link: https://github.com/facebookresearch/FBGEMM/pull/1978

Resolve the failure:

```
/data/users/jianyuhuang/fbsource/genai/msl/../llama4x/llama4x/ops/attention/cutlass.py:7 in <module>

     4 import torch
     5
     6 try:
  ❱  7     from fbgemm_gpu.experimental.gen_ai.attention.cutlass_blackwell_fm
     8         cutlass_blackwell_fmha_interface as fmha,
     9     )
    10

  locals:
    Any = typing.Any
    dataclass =
    Iterable = typing.Iterable
    torch =

/home/jianyuhuang/.fbpkg_conda_envs/xlformers_msl_rl_conda_feedstock-df42b69/lib/python3.10/site-packages/fbgemm_gpu/experimental/gen_ai/attention/cutlass_blackwell_fmha/cutlass_blackwell_fmha_custom_op.py:15 in <module>

TypeError: unhashable type: 'list'
```

Differential Revision: D83633629
---
 .../cutlass_blackwell_fmha_custom_op.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/fbgemm_gpu/experimental/gen_ai/gen_ai/attention/cutlass_blackwell_fmha/cutlass_blackwell_fmha_custom_op.py b/fbgemm_gpu/experimental/gen_ai/gen_ai/attention/cutlass_blackwell_fmha/cutlass_blackwell_fmha_custom_op.py
index 0e2f7e6e9a..40673f13e2 100644
--- a/fbgemm_gpu/experimental/gen_ai/gen_ai/attention/cutlass_blackwell_fmha/cutlass_blackwell_fmha_custom_op.py
+++ b/fbgemm_gpu/experimental/gen_ai/gen_ai/attention/cutlass_blackwell_fmha/cutlass_blackwell_fmha_custom_op.py
@@ -13,13 +13,13 @@
 torch.library.define(
     "blackwell_fmha::fmha_fwd",
     "(Tensor q, Tensor k, Tensor v, Tensor? cu_seqlens_q, Tensor? cu_seqlens_k, int? max_seq_len_q, int? max_seq_len_k, float? softmax_scale, bool? causal, Tensor? seqlen_kv) -> (Tensor, Tensor)",
-    tags=[torch.Tag.pt2_compliant_tag],
+    tags=torch.Tag.pt2_compliant_tag,
 )
 
 torch.library.define(
     "blackwell_fmha::fmha_bwd",
     "(Tensor dout, Tensor q, Tensor k, Tensor v, Tensor out, Tensor softmax_lse, Tensor? cu_seqlens_q, Tensor? cu_seqlens_k, int? max_seq_len_q, int? max_seq_len_k, bool? causal) -> (Tensor, Tensor, Tensor)",
-    tags=[torch.Tag.pt2_compliant_tag],
+    tags=torch.Tag.pt2_compliant_tag,
 )
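
A minimal sketch of the pattern this patch changes, assuming (from the traceback) that `torch.library.define` hashes the `tags` value internally on the affected PyTorch build, so a plain `list` raises `TypeError: unhashable type: 'list'`. The operator name `my_ns::add_one`, its schema, and the `CompositeExplicitAutograd` registration are made up for illustration; only the `tags=torch.Tag.pt2_compliant_tag` form mirrors the patch.

```python
import torch

# Hypothetical operator, for illustration only.
# On the failing build, tags=[torch.Tag.pt2_compliant_tag] raised
# "TypeError: unhashable type: 'list'", presumably because the tag value is
# hashed internally. Passing the torch.Tag directly (as this patch does) is
# hashable; a tuple such as (torch.Tag.pt2_compliant_tag,) would presumably
# also work, but that is an assumption, not something the patch verifies.
torch.library.define(
    "my_ns::add_one",
    "(Tensor x) -> Tensor",
    tags=torch.Tag.pt2_compliant_tag,
)


# Register a simple implementation so the op is callable.
@torch.library.impl("my_ns::add_one", "CompositeExplicitAutograd")
def add_one(x: torch.Tensor) -> torch.Tensor:
    return x + 1


print(torch.ops.my_ns.add_one(torch.zeros(2)))  # tensor([1., 1.])
```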