diff --git a/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3b.toml b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3b.toml
new file mode 100644
index 00000000..7a37f78f
--- /dev/null
+++ b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3b.toml
@@ -0,0 +1,16 @@
+name = "Tongyi DeepResearch 30B A3B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3bfree.toml b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3bfree.toml
new file mode 100644
index 00000000..76a57925
--- /dev/null
+++ b/providers/gatewayz/models/alibaba-tongyi-deepresearch-30b-a3bfree.toml
@@ -0,0 +1,16 @@
+name = "Tongyi DeepResearch 30B A3B (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/amazon-nova-premier-v1.toml b/providers/gatewayz/models/amazon-nova-premier-v1.toml
new file mode 100644
index 00000000..51fc408b
--- /dev/null
+++ b/providers/gatewayz/models/amazon-nova-premier-v1.toml
@@ -0,0 +1,16 @@
+name = "Amazon: Nova Premier 1.0"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/anthropic-claude-haiku-4.5.toml b/providers/gatewayz/models/anthropic-claude-haiku-4.5.toml
new file mode 100644
index 00000000..5abe8011
--- /dev/null
+++ b/providers/gatewayz/models/anthropic-claude-haiku-4.5.toml
@@ -0,0 +1,16 @@
+name = "Anthropic: Claude Haiku 4.5"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/anthropic-claude-sonnet-4.5.toml b/providers/gatewayz/models/anthropic-claude-sonnet-4.5.toml
new file mode 100644
index 00000000..657e1d98
--- /dev/null
+++ b/providers/gatewayz/models/anthropic-claude-sonnet-4.5.toml
@@ -0,0 +1,16 @@
+name = "Anthropic: Claude Sonnet 4.5"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/arcee-ai-afm-4.5b.toml b/providers/gatewayz/models/arcee-ai-afm-4.5b.toml
new file mode 100644
index 00000000..4761a298
--- /dev/null
+++ b/providers/gatewayz/models/arcee-ai-afm-4.5b.toml
@@ -0,0 +1,16 @@
+name = "Arcee AI: AFM 4.5B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 65536
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/baidu-ernie-4.5-21b-a3b-thinking.toml b/providers/gatewayz/models/baidu-ernie-4.5-21b-a3b-thinking.toml
new file mode 100644
index 00000000..79b7fb04
--- /dev/null
+++ b/providers/gatewayz/models/baidu-ernie-4.5-21b-a3b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Baidu: ERNIE 4.5 21B A3B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepcogito-cogito-v2-preview-llama-405b.toml b/providers/gatewayz/models/deepcogito-cogito-v2-preview-llama-405b.toml
new file mode 100644
index 00000000..138049c8
--- /dev/null
+++ b/providers/gatewayz/models/deepcogito-cogito-v2-preview-llama-405b.toml
@@ -0,0 +1,16 @@
+name = "Deep Cogito: Cogito V2 Preview Llama 405B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepseek-deepseek-v3.1-terminus.toml b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminus.toml
new file mode 100644
index 00000000..99dddd36
--- /dev/null
+++ b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminus.toml
@@ -0,0 +1,16 @@
+name = "DeepSeek: DeepSeek V3.1 Terminus"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 163840
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepseek-deepseek-v3.1-terminusexacto.toml b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminusexacto.toml
new file mode 100644
index 00000000..a71ae51b
--- /dev/null
+++ b/providers/gatewayz/models/deepseek-deepseek-v3.1-terminusexacto.toml
@@ -0,0 +1,16 @@
+name = "DeepSeek: DeepSeek V3.1 Terminus (exacto)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/deepseek-deepseek-v3.2-exp.toml b/providers/gatewayz/models/deepseek-deepseek-v3.2-exp.toml
new file mode 100644
index 00000000..a878480d
--- /dev/null
+++ b/providers/gatewayz/models/deepseek-deepseek-v3.2-exp.toml
@@ -0,0 +1,16 @@
+name = "DeepSeek: DeepSeek V3.2 Exp"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 163840
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/google-gemini-2.5-flash-image.toml b/providers/gatewayz/models/google-gemini-2.5-flash-image.toml
new file mode 100644
index 00000000..1fd9f316
--- /dev/null
+++ b/providers/gatewayz/models/google-gemini-2.5-flash-image.toml
@@ -0,0 +1,16 @@
+name = "Google: Gemini 2.5 Flash Image (Nano Banana)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text", "image"]
diff --git a/providers/gatewayz/models/google-gemini-2.5-flash-lite-preview-09-2025.toml b/providers/gatewayz/models/google-gemini-2.5-flash-lite-preview-09-2025.toml
new file mode 100644
index 00000000..3251a798
--- /dev/null
+++ b/providers/gatewayz/models/google-gemini-2.5-flash-lite-preview-09-2025.toml
@@ -0,0 +1,16 @@
+name = "Google: Gemini 2.5 Flash Lite Preview 09-2025"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1048576
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/google-gemini-2.5-flash-preview-09-2025.toml b/providers/gatewayz/models/google-gemini-2.5-flash-preview-09-2025.toml
new file mode 100644
index 00000000..e5654699
--- /dev/null
+++ b/providers/gatewayz/models/google-gemini-2.5-flash-preview-09-2025.toml
@@ -0,0 +1,16 @@
+name = "Google: Gemini 2.5 Flash Preview 09-2025"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1048576
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/ibm-granite-granite-4.0-h-micro.toml b/providers/gatewayz/models/ibm-granite-granite-4.0-h-micro.toml
new file mode 100644
index 00000000..4cbd268b
--- /dev/null
+++ b/providers/gatewayz/models/ibm-granite-granite-4.0-h-micro.toml
@@ -0,0 +1,16 @@
+name = "IBM: Granite 4.0 Micro"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/inclusionai-ling-1t.toml b/providers/gatewayz/models/inclusionai-ling-1t.toml
new file mode 100644
index 00000000..be061f8c
--- /dev/null
+++ b/providers/gatewayz/models/inclusionai-ling-1t.toml
@@ -0,0 +1,16 @@
+name = "inclusionAI: Ling-1T"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/inclusionai-ring-1t.toml b/providers/gatewayz/models/inclusionai-ring-1t.toml
new file mode 100644
index 00000000..99c2b2cb
--- /dev/null
+++ b/providers/gatewayz/models/inclusionai-ring-1t.toml
@@ -0,0 +1,16 @@
+name = "inclusionAI: Ring 1T"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/liquid-lfm-2.2-6b.toml b/providers/gatewayz/models/liquid-lfm-2.2-6b.toml
new file mode 100644
index 00000000..81b0d300
--- /dev/null
+++ b/providers/gatewayz/models/liquid-lfm-2.2-6b.toml
@@ -0,0 +1,16 @@
+name = "LiquidAI/LFM2-2.6B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/liquid-lfm2-8b-a1b.toml b/providers/gatewayz/models/liquid-lfm2-8b-a1b.toml
new file mode 100644
index 00000000..8981beaa
--- /dev/null
+++ b/providers/gatewayz/models/liquid-lfm2-8b-a1b.toml
@@ -0,0 +1,16 @@
+name = "LiquidAI/LFM2-8B-A1B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/meituan-longcat-flash-chat.toml b/providers/gatewayz/models/meituan-longcat-flash-chat.toml
new file mode 100644
index 00000000..064f830b
--- /dev/null
+++ b/providers/gatewayz/models/meituan-longcat-flash-chat.toml
@@ -0,0 +1,16 @@
+name = "Meituan: LongCat Flash Chat"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/meituan-longcat-flash-chatfree.toml b/providers/gatewayz/models/meituan-longcat-flash-chatfree.toml
new file mode 100644
index 00000000..6b09d847
--- /dev/null
+++ b/providers/gatewayz/models/meituan-longcat-flash-chatfree.toml
@@ -0,0 +1,16 @@
+name = "Meituan: LongCat Flash Chat (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/minimax-minimax-m2.toml b/providers/gatewayz/models/minimax-minimax-m2.toml
new file mode 100644
index 00000000..5187955e
--- /dev/null
+++ b/providers/gatewayz/models/minimax-minimax-m2.toml
@@ -0,0 +1,16 @@
+name = "MiniMax: MiniMax M2"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 196608
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/minimax-minimax-m2free.toml b/providers/gatewayz/models/minimax-minimax-m2free.toml
new file mode 100644
index 00000000..dc24cdfb
--- /dev/null
+++ b/providers/gatewayz/models/minimax-minimax-m2free.toml
@@ -0,0 +1,16 @@
+name = "MiniMax: MiniMax M2 (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 204800
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/mistralai-voxtral-small-24b-2507.toml b/providers/gatewayz/models/mistralai-voxtral-small-24b-2507.toml
new file mode 100644
index 00000000..df751430
--- /dev/null
+++ b/providers/gatewayz/models/mistralai-voxtral-small-24b-2507.toml
@@ -0,0 +1,16 @@
+name = "Mistral: Voxtral Small 24B 2507"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-llama-3.3-nemotron-super-49b-v1.5.toml b/providers/gatewayz/models/nvidia-llama-3.3-nemotron-super-49b-v1.5.toml
new file mode 100644
index 00000000..37fa209b
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-llama-3.3-nemotron-super-49b-v1.5.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vl.toml b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vl.toml
new file mode 100644
index 00000000..90c6db2a
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vl.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Nemotron Nano 12B 2 VL"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vlfree.toml b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vlfree.toml
new file mode 100644
index 00000000..ab0a1aea
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-nemotron-nano-12b-v2-vlfree.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Nemotron Nano 12B 2 VL (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/nvidia-nemotron-nano-9b-v2free.toml b/providers/gatewayz/models/nvidia-nemotron-nano-9b-v2free.toml
new file mode 100644
index 00000000..e148d3c0
--- /dev/null
+++ b/providers/gatewayz/models/nvidia-nemotron-nano-9b-v2free.toml
@@ -0,0 +1,16 @@
+name = "NVIDIA: Nemotron Nano 9B V2 (free)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-gpt-5-codex.toml b/providers/gatewayz/models/openai-gpt-5-codex.toml
new file mode 100644
index 00000000..142973bf
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-codex.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Codex"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-gpt-5-image-mini.toml b/providers/gatewayz/models/openai-gpt-5-image-mini.toml
new file mode 100644
index 00000000..2c7a5f87
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-image-mini.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Image Mini"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text", "image"]
diff --git a/providers/gatewayz/models/openai-gpt-5-image.toml b/providers/gatewayz/models/openai-gpt-5-image.toml
new file mode 100644
index 00000000..a833e6fd
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-image.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Image"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text", "image"]
diff --git a/providers/gatewayz/models/openai-gpt-5-pro.toml b/providers/gatewayz/models/openai-gpt-5-pro.toml
new file mode 100644
index 00000000..b5b3eeef
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-5-pro.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: GPT-5 Pro"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 400000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-gpt-oss-safeguard-20b.toml b/providers/gatewayz/models/openai-gpt-oss-safeguard-20b.toml
new file mode 100644
index 00000000..fdb220bb
--- /dev/null
+++ b/providers/gatewayz/models/openai-gpt-oss-safeguard-20b.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: gpt-oss-safeguard-20b"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-o3-deep-research.toml b/providers/gatewayz/models/openai-o3-deep-research.toml
new file mode 100644
index 00000000..afcaaeeb
--- /dev/null
+++ b/providers/gatewayz/models/openai-o3-deep-research.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: o3 Deep Research"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-o4-mini-deep-research.toml b/providers/gatewayz/models/openai-o4-mini-deep-research.toml
new file mode 100644
index 00000000..dbfe3272
--- /dev/null
+++ b/providers/gatewayz/models/openai-o4-mini-deep-research.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: o4 Mini Deep Research"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/openai-text-embedding-3-large.toml b/providers/gatewayz/models/openai-text-embedding-3-large.toml
new file mode 100644
index 00000000..f59f3890
--- /dev/null
+++ b/providers/gatewayz/models/openai-text-embedding-3-large.toml
@@ -0,0 +1,16 @@
+name = "OpenAI: Text Embedding 3 Large"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 8192
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/opengvlab-internvl3-78b.toml b/providers/gatewayz/models/opengvlab-internvl3-78b.toml
new file mode 100644
index 00000000..c6743def
--- /dev/null
+++ b/providers/gatewayz/models/opengvlab-internvl3-78b.toml
@@ -0,0 +1,16 @@
+name = "OpenGVLab: InternVL3 78B"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 32768
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/perplexity-sonar-pro-search.toml b/providers/gatewayz/models/perplexity-sonar-pro-search.toml
new file mode 100644
index 00000000..82bfc558
--- /dev/null
+++ b/providers/gatewayz/models/perplexity-sonar-pro-search.toml
@@ -0,0 +1,16 @@
+name = "Perplexity: Sonar Pro Search"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 200000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen-plus-2025-07-28.toml b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28.toml
new file mode 100644
index 00000000..ecd24ae6
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen Plus 0728"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen-plus-2025-07-28thinking.toml b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28thinking.toml
new file mode 100644
index 00000000..56662637
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen-plus-2025-07-28thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen Plus 0728 (thinking)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 1000000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-coder-flash.toml b/providers/gatewayz/models/qwen-qwen3-coder-flash.toml
new file mode 100644
index 00000000..1f06f20b
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-coder-flash.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Coder Flash"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-coder-plus.toml b/providers/gatewayz/models/qwen-qwen3-coder-plus.toml
new file mode 100644
index 00000000..9036775f
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-coder-plus.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Coder Plus"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 128000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-max.toml b/providers/gatewayz/models/qwen-qwen3-max.toml
new file mode 100644
index 00000000..df03c7cb
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-max.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Max"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 256000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-instruct.toml
new file mode 100644
index 00000000..2190ba33
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Next 80B A3B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-thinking.toml
new file mode 100644
index 00000000..ff2007e7
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-next-80b-a3b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 Next 80B A3B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-instruct.toml
new file mode 100644
index 00000000..8de68cdf
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 235B A22B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-thinking.toml
new file mode 100644
index 00000000..7081fa60
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-235b-a22b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 235B A22B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-instruct.toml
new file mode 100644
index 00000000..0e8e1898
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 30B A3B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 0
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-thinking.toml
new file mode 100644
index 00000000..2935b04b
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-30b-a3b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 30B A3B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-32b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-32b-instruct.toml
new file mode 100644
index 00000000..9199745f
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-32b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 32B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 262144
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-8b-instruct.toml b/providers/gatewayz/models/qwen-qwen3-vl-8b-instruct.toml
new file mode 100644
index 00000000..45a960c2
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-8b-instruct.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 8B Instruct"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/qwen-qwen3-vl-8b-thinking.toml b/providers/gatewayz/models/qwen-qwen3-vl-8b-thinking.toml
new file mode 100644
index 00000000..90c7cb86
--- /dev/null
+++ b/providers/gatewayz/models/qwen-qwen3-vl-8b-thinking.toml
@@ -0,0 +1,16 @@
+name = "Qwen: Qwen3 VL 8B Thinking"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 256000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/relace-relace-apply-3.toml b/providers/gatewayz/models/relace-relace-apply-3.toml
new file mode 100644
index 00000000..d1bde167
--- /dev/null
+++ b/providers/gatewayz/models/relace-relace-apply-3.toml
@@ -0,0 +1,16 @@
+name = "Relace: Relace Apply 3"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 256000
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/thedrummer-cydonia-24b-v4.1.toml b/providers/gatewayz/models/thedrummer-cydonia-24b-v4.1.toml
new file mode 100644
index 00000000..820884c1
--- /dev/null
+++ b/providers/gatewayz/models/thedrummer-cydonia-24b-v4.1.toml
@@ -0,0 +1,16 @@
+name = "TheDrummer: Cydonia 24B V4.1"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 131072
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/x-ai-grok-4-fast.toml b/providers/gatewayz/models/x-ai-grok-4-fast.toml
new file mode 100644
index 00000000..a4a5eb27
--- /dev/null
+++ b/providers/gatewayz/models/x-ai-grok-4-fast.toml
@@ -0,0 +1,16 @@
+name = "xAI: Grok 4 Fast"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 2000000
+output = 4096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/gatewayz/models/z-ai-glm-4.6.toml b/providers/gatewayz/models/z-ai-glm-4.6.toml
new file mode 100644
index 00000000..cc4244a6
--- /dev/null
+++ b/providers/gatewayz/models/z-ai-glm-4.6.toml
@@ -0,0 +1,16 @@
+name = "Z.AI: GLM 4.6"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 202752
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/models/z-ai-glm-4.6exacto.toml b/providers/gatewayz/models/z-ai-glm-4.6exacto.toml
new file mode 100644
index 00000000..3de4cd9b
--- /dev/null
+++ b/providers/gatewayz/models/z-ai-glm-4.6exacto.toml
@@ -0,0 +1,16 @@
+name = "Z.AI: GLM 4.6 (exacto)"
+release_date = "2025-11"
+last_updated = "2025-11"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = false
+
+[limit]
+context = 202752
+output = 4096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/gatewayz/provider.toml b/providers/gatewayz/provider.toml
new file mode 100644
index 00000000..892367df
--- /dev/null
+++ b/providers/gatewayz/provider.toml
@@ -0,0 +1,5 @@
+name = "Gatewayz"
+env = ["GATEWAYZ_API_KEY"]
+npm = "@ai-sdk/openai-compatible"
+api = "https://api.gatewayz.ai/v1"
+doc = "https://api.gatewayz.ai/docs"
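The new provider.toml maps Gatewayz onto the AI SDK's `@ai-sdk/openai-compatible` package, keyed by `GATEWAYZ_API_KEY` and pointed at `https://api.gatewayz.ai/v1`. Below is a minimal sketch (not part of this diff) of how such an entry is typically consumed, assuming the Vercel AI SDK; the model slug used is a hypothetical placeholder, since this PR does not specify the gateway's model IDs.

```ts
// Sketch only: wiring the provider.toml fields through @ai-sdk/openai-compatible.
import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
import { generateText } from "ai";

const gatewayz = createOpenAICompatible({
  name: "gatewayz",
  baseURL: "https://api.gatewayz.ai/v1",   // `api` field from provider.toml
  apiKey: process.env.GATEWAYZ_API_KEY,    // `env` field from provider.toml
});

const { text } = await generateText({
  model: gatewayz("minimax/minimax-m2"),   // hypothetical model slug
  prompt: "Say hello from Gatewayz.",
});
console.log(text);
```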