From 4a12840235a6cbbe95e3c40c7f00b7ebfc2d5f36 Mon Sep 17 00:00:00 2001
From: Luke
Date: Tue, 18 Nov 2025 13:21:47 -0500
Subject: [PATCH 1/5] [Bugfix] Fix config registry issue

Signed-off-by: Luke
---
 vllm/transformers_utils/config.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/vllm/transformers_utils/config.py b/vllm/transformers_utils/config.py
index ac4a71648cec..516e44dfd5b6 100644
--- a/vllm/transformers_utils/config.py
+++ b/vllm/transformers_utils/config.py
@@ -24,7 +24,7 @@
     RepositoryNotFoundError,
     RevisionNotFoundError,
 )
-from transformers import DeepseekV3Config, GenerationConfig, PretrainedConfig
+from transformers import GenerationConfig, PretrainedConfig
 from transformers.models.auto.image_processing_auto import get_image_processor_config
 from transformers.models.auto.modeling_auto import (
     MODEL_FOR_CAUSAL_LM_MAPPING_NAMES,
@@ -80,7 +80,7 @@ def __getitem__(self, key):
     afmoe="AfmoeConfig",
     chatglm="ChatGLMConfig",
     deepseek_vl_v2="DeepseekVLV2Config",
-    deepseek_v32=DeepseekV3Config,
+    deepseek_v32="DeepseekV3Config",
     flex_olmo="FlexOlmoConfig",
     kimi_linear="KimiLinearConfig",
     kimi_vl="KimiVLConfig",

From d16f226a9e9180bab216925b564a6a7d25ee81ec Mon Sep 17 00:00:00 2001
From: Luke
Date: Tue, 18 Nov 2025 17:01:41 -0500
Subject: [PATCH 2/5] [Bugfix] Expose DeepseekV3Config in configs for lazy
 loading

Signed-off-by: Luke
---
 vllm/transformers_utils/configs/__init__.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/vllm/transformers_utils/configs/__init__.py b/vllm/transformers_utils/configs/__init__.py
index dcae05a15fec..d8fa1df0580b 100644
--- a/vllm/transformers_utils/configs/__init__.py
+++ b/vllm/transformers_utils/configs/__init__.py
@@ -5,11 +5,13 @@
 
 - There is no configuration file defined by HF Hub or Transformers library.
 - There is a need to override the existing config to support vLLM.
+- The HF model_type is not recognized by Transformers library but maps to a config in Transformers library, ex., deepseek-ai/DeepSeek-V3.2-Exp.
 """
 
 from vllm.transformers_utils.configs.afmoe import AfmoeConfig
 from vllm.transformers_utils.configs.chatglm import ChatGLMConfig
 from vllm.transformers_utils.configs.deepseek_vl2 import DeepseekVLV2Config
+from transformers import DeepseekV3Config
 
 from vllm.transformers_utils.configs.dotsocr import DotsOCRConfig
 from vllm.transformers_utils.configs.eagle import EAGLEConfig
@@ -44,6 +46,7 @@
     "AfmoeConfig",
     "ChatGLMConfig",
     "DeepseekVLV2Config",
+    "DeepseekV3Config",
     "DotsOCRConfig",
     "EAGLEConfig",
     "FlexOlmoConfig",

From 1d531f1bc627314d34e651ae0b100a297e044db7 Mon Sep 17 00:00:00 2001
From: Luke
Date: Tue, 18 Nov 2025 17:14:48 -0500
Subject: [PATCH 3/5] [Bugfix] Break a long line to meet the 88-character
 limit

Signed-off-by: Luke
---
 vllm/transformers_utils/configs/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/vllm/transformers_utils/configs/__init__.py b/vllm/transformers_utils/configs/__init__.py
index d8fa1df0580b..56e7d9ad24fb 100644
--- a/vllm/transformers_utils/configs/__init__.py
+++ b/vllm/transformers_utils/configs/__init__.py
@@ -5,7 +5,8 @@
 
 - There is no configuration file defined by HF Hub or Transformers library.
 - There is a need to override the existing config to support vLLM.
-- The HF model_type is not recognized by Transformers library but maps to a config in Transformers library, ex., deepseek-ai/DeepSeek-V3.2-Exp.
+- The HF model_type is not recognized by Transformers library but maps to a
+  config in Transformers library, ex., deepseek-ai/DeepSeek-V3.2-Exp.
""" from vllm.transformers_utils.configs.afmoe import AfmoeConfig From 51ce74243c259c48c206099e4b262e9dc6af8177 Mon Sep 17 00:00:00 2001 From: Luke Date: Tue, 18 Nov 2025 18:26:38 -0500 Subject: [PATCH 4/5] Apply ruff auto-fixes Signed-off-by: Luke --- vllm/transformers_utils/configs/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vllm/transformers_utils/configs/__init__.py b/vllm/transformers_utils/configs/__init__.py index 56e7d9ad24fb..1c413b5443e7 100644 --- a/vllm/transformers_utils/configs/__init__.py +++ b/vllm/transformers_utils/configs/__init__.py @@ -9,10 +9,11 @@ config in Transformers library, ex., deepseek-ai/DeepSeek-V3.2-Exp. """ +from transformers import DeepseekV3Config + from vllm.transformers_utils.configs.afmoe import AfmoeConfig from vllm.transformers_utils.configs.chatglm import ChatGLMConfig from vllm.transformers_utils.configs.deepseek_vl2 import DeepseekVLV2Config -from transformers import DeepseekV3Config from vllm.transformers_utils.configs.dotsocr import DotsOCRConfig from vllm.transformers_utils.configs.eagle import EAGLEConfig From 5062047210f17f7c8312329ad04cbbe45b1067b7 Mon Sep 17 00:00:00 2001 From: Luke Date: Wed, 19 Nov 2025 00:11:59 -0500 Subject: [PATCH 5/5] Polish the docstring Signed-off-by: Luke --- vllm/transformers_utils/configs/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vllm/transformers_utils/configs/__init__.py b/vllm/transformers_utils/configs/__init__.py index 1c413b5443e7..d28fd8d03337 100644 --- a/vllm/transformers_utils/configs/__init__.py +++ b/vllm/transformers_utils/configs/__init__.py @@ -5,8 +5,9 @@ - There is no configuration file defined by HF Hub or Transformers library. - There is a need to override the existing config to support vLLM. -- The HF model_type is not recognized by Transformers library but maps to a - config in Transformers library, ex., deepseek-ai/DeepSeek-V3.2-Exp. +- The HF model_type isn't recognized by the Transformers library but can + be mapped to an existing Transformers config, such as + deepseek-ai/DeepSeek-V3.2-Exp. """ from transformers import DeepseekV3Config