From 2c87145d2a1445fe6c2aa58c77d006a32fd954d4 Mon Sep 17 00:00:00 2001
From: Mengwei Liu
Date: Mon, 28 Jul 2025 09:24:13 -0700
Subject: [PATCH] Pin transformers version to 4.53.1 to avoid breakage

To avoid this error:

```
Traceback (most recent call last):
  File "/opt/conda/envs/py_3.10/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/opt/conda/envs/py_3.10/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/opt/conda/envs/py_3.10/lib/python3.10/site-packages/executorch/examples/models/phi-3-mini/export_phi-3-mini.py", line 168, in <module>
    main()
  File "/opt/conda/envs/py_3.10/lib/python3.10/site-packages/executorch/examples/models/phi-3-mini/export_phi-3-mini.py", line 164, in main
    export(parser.parse_args())
  File "/opt/conda/envs/py_3.10/lib/python3.10/site-packages/executorch/examples/models/phi-3-mini/export_phi-3-mini.py", line 79, in export
    exportable_module = TorchExportableModuleForDecoderOnlyLM(
  File "/opt/conda/envs/py_3.10/lib/python3.10/site-packages/transformers/integrations/executorch.py", line 67, in __init__
    self.model = TorchExportableModuleWithStaticCache(model)
  File "/opt/conda/envs/py_3.10/lib/python3.10/site-packages/transformers/integrations/executorch.py", line 293, in __init__
    max_batch_size=self.model.generation_config.cache_config.get("batch_size"),
AttributeError: 'StaticCacheConfig' object has no attribute 'get'
```
---
 requirements-examples.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements-examples.txt b/requirements-examples.txt
index 7426df861a2..0923cf8fefc 100644
--- a/requirements-examples.txt
+++ b/requirements-examples.txt
@@ -4,4 +4,4 @@ datasets == 3.6.0 # 4.0.0 deprecates trust_remote_code and load scripts. For now
 timm == 1.0.7
 torchsr == 1.0.4
 torchtune >= 0.6.1
-transformers >= 4.53.1
+transformers == 4.53.1