From 4f063776e47aae4340792aab0134c5bda5366244 Mon Sep 17 00:00:00 2001
From: Oskar Stark
Date: Mon, 22 Sep 2025 18:23:50 +0200
Subject: [PATCH] [AI Bundle][Platform] Add `ModelCatalog`

---
 demo/config/packages/ai.yaml | 23 +-
 examples/aimlapi/chat.php | 4 +-
 examples/aimlapi/image-input-binary.php | 8 +-
 examples/aimlapi/toolcall.php | 8 +-
 examples/aimlapi/vectorizing.php | 6 +-
 examples/albert/chat.php | 5 +-
 examples/anthropic/chat.php | 4 +-
 examples/anthropic/image-input-binary.php | 4 +-
 examples/anthropic/image-input-url.php | 4 +-
 examples/anthropic/pdf-input-binary.php | 4 +-
 examples/anthropic/pdf-input-url.php | 4 +-
 examples/anthropic/stream.php | 4 +-
 examples/anthropic/token-metadata.php | 4 +-
 examples/anthropic/toolcall.php | 4 +-
 examples/azure/audio-transcript.php | 4 +-
 examples/azure/chat-gpt.php | 5 +-
 examples/azure/chat-llama.php | 4 +-
 examples/azure/embeddings.php | 4 +-
 examples/bedrock/chat-claude.php | 4 +-
 examples/bedrock/chat-llama.php | 4 +-
 examples/bedrock/chat-nova.php | 4 +-
 examples/bedrock/image-claude-binary.php | 4 +-
 examples/bedrock/image-nova.php | 4 +-
 examples/bedrock/toolcall-claude.php | 4 +-
 examples/bedrock/toolcall-nova.php | 4 +-
 examples/cerebras/chat.php | 3 +-
 examples/cerebras/stream.php | 3 +-
 examples/dockermodelrunner/chat.php | 4 +-
 examples/dockermodelrunner/embeddings.php | 3 +-
 examples/dockermodelrunner/toolcall.php | 4 +-
 .../document/vectorizing-text-documents.php | 4 +-
 examples/document/vectorizing.php | 4 +-
 examples/elevenlabs/speech-to-text.php | 4 +-
 .../elevenlabs/text-to-speech-as-stream.php | 6 +-
 examples/elevenlabs/text-to-speech.php | 6 +-
 examples/gemini/audio-input.php | 4 +-
 examples/gemini/chat.php | 4 +-
 examples/gemini/embeddings.php | 4 +-
 examples/gemini/image-input.php | 4 +-
 examples/gemini/pdf-input-binary.php | 4 +-
 examples/gemini/server-tools.php | 11 +-
 examples/gemini/stream.php | 4 +-
 examples/gemini/structured-output-clock.php | 4 +-
 examples/gemini/structured-output-math.php | 4 +-
 examples/gemini/token-metadata.php | 4 +-
 examples/gemini/toolcall.php | 4 +-
 examples/huggingface/audio-classification.php | 4 +-
 .../automatic-speech-recognition.php | 4 +-
 examples/huggingface/chat-completion.php | 4 +-
 examples/huggingface/feature-extraction.php | 4 +-
 examples/huggingface/fill-mask.php | 4 +-
 examples/huggingface/image-classification.php | 4 +-
 examples/huggingface/image-segmentation.php | 4 +-
 examples/huggingface/image-to-text.php | 4 +-
 examples/huggingface/object-detection.php | 4 +-
 examples/huggingface/question-answering.php | 4 +-
 examples/huggingface/sentence-similarity.php | 4 +-
 examples/huggingface/summarization.php | 4 +-
 .../huggingface/table-question-answering.php | 4 +-
 examples/huggingface/text-classification.php | 4 +-
 examples/huggingface/text-generation.php | 4 +-
 examples/huggingface/text-to-image.php | 4 +-
 examples/huggingface/token-classification.php | 4 +-
 examples/huggingface/translation.php | 4 +-
 .../huggingface/zero-shot-classification.php | 4 +-
 examples/indexer/index-file-loader.php | 3 +-
 examples/indexer/index-inmemory-loader.php | 3 +-
 examples/indexer/index-rss-loader.php | 3 +-
 examples/indexer/index-with-filters.php | 3 +-
 examples/lmstudio/chat.php | 4 +-
 examples/lmstudio/image-input-binary.php | 8 +-
 examples/memory/mariadb.php | 9 +-
 examples/memory/static.php | 6 +-
 examples/misc/chat-system-prompt.php | 4 +-
 examples/misc/parallel-chat-gpt.php | 4 +-
 examples/misc/parallel-embeddings.php | 7 +-
 examples/misc/persistent-chat.php | 4 +-
 examples/mistral/chat-multiple.php | 3 +-
 examples/mistral/chat.php | 4 +-
 examples/mistral/embeddings.php | 4 +-
 examples/mistral/image.php | 4 +-
 examples/mistral/pdf-input-binary.php | 4 +-
 examples/mistral/pdf-input-url.php | 4 +-
 examples/mistral/stream.php | 4 +-
 examples/mistral/structured-output-math.php | 5 +-
 examples/mistral/token-metadata.php | 4 +-
 examples/mistral/toolcall-stream.php | 4 +-
 examples/mistral/toolcall.php | 4 +-
 examples/ollama/chat-llama.php | 4 +-
 examples/ollama/embeddings.php | 3 +-
 examples/ollama/indexer.php | 3 +-
 examples/ollama/rag.php | 7 +-
 examples/ollama/stream.php | 4 +-
 examples/ollama/structured-output-math.php | 4 +-
 examples/ollama/toolcall.php | 4 +-
 examples/openai/audio-input.php | 4 +-
 examples/openai/audio-transcript.php | 4 +-
 examples/openai/chat-o1.php | 4 +-
 examples/openai/chat.php | 4 +-
 examples/openai/embeddings.php | 4 +-
 examples/openai/image-input-binary.php | 4 +-
 examples/openai/image-input-url.php | 4 +-
 examples/openai/image-output-dall-e-2.php | 3 +-
 examples/openai/image-output-dall-e-3.php | 3 +-
 examples/openai/pdf-input-binary.php | 4 +-
 examples/openai/stream.php | 4 +-
 examples/openai/structured-output-clock.php | 4 +-
 ...tured-output-list-of-polymorphic-items.php | 5 +-
 examples/openai/structured-output-math.php | 5 +-
 .../openai/structured-output-union-types.php | 5 +-
 examples/openai/token-metadata.php | 4 +-
 examples/openai/toolcall-stream.php | 4 +-
 examples/openai/toolcall.php | 4 +-
 examples/openrouter/chat-gemini.php | 4 +-
 examples/perplexity/academic-search.php | 4 +-
 examples/perplexity/chat.php | 4 +-
 examples/perplexity/disable-search.php | 4 +-
 examples/perplexity/image-input-url.php | 4 +-
 examples/perplexity/pdf-input-url.php | 4 +-
 examples/perplexity/stream.php | 4 +-
 examples/perplexity/token-metadata.php | 4 +-
 examples/perplexity/web-search.php | 4 +-
 examples/rag/cache.php | 8 +-
 examples/rag/chromadb.php | 8 +-
 examples/rag/clickhouse.php | 8 +-
 examples/rag/cloudflare.php | 8 +-
 examples/rag/in-memory.php | 8 +-
 examples/rag/mariadb-gemini.php | 11 +-
 examples/rag/mariadb-openai.php | 8 +-
 examples/rag/meilisearch.php | 8 +-
 examples/rag/milvus.php | 8 +-
 examples/rag/mongodb.php | 8 +-
 examples/rag/neo4j.php | 8 +-
 examples/rag/pinecone.php | 8 +-
 examples/rag/postgres.php | 8 +-
 examples/rag/qdrant.php | 8 +-
 examples/rag/surrealdb.php | 8 +-
 examples/rag/typesense.php | 8 +-
 examples/rag/weaviate.php | 7 +-
 examples/replicate/chat-llama.php | 4 +-
 examples/scaleway/chat.php | 4 +-
 examples/scaleway/embeddings.php | 3 +-
 examples/scaleway/stream.php | 4 +-
 examples/scaleway/structured-output-math.php | 4 +-
 examples/scaleway/toolcall-stream.php | 4 +-
 examples/scaleway/toolcall.php | 4 +-
 examples/scaleway/vision.php | 6 +-
 examples/toolbox/brave.php | 4 +-
 examples/toolbox/clock.php | 4 +-
 examples/toolbox/firecrawl-crawl.php | 4 +-
 examples/toolbox/firecrawl-map.php | 4 +-
 examples/toolbox/firecrawl-scrape.php | 4 +-
 examples/toolbox/mapbox-geocode.php | 4 +-
 examples/toolbox/mapbox-reverse-geocode.php | 4 +-
 examples/toolbox/serpapi.php | 4 +-
 examples/toolbox/tavily.php | 4 +-
 examples/toolbox/weather-event.php | 4 +-
 examples/transformers/text-generation.php | 4 +-
 examples/vertexai/audio-input.php | 4 +-
 examples/vertexai/chat.php | 4 +-
 examples/vertexai/embeddings.php | 4 +-
 examples/vertexai/image-input.php | 4 +-
 examples/vertexai/pdf-input-binary.php | 4 +-
 examples/vertexai/server-tools.php | 5 +-
 examples/vertexai/stream.php | 4 +-
 examples/vertexai/structured-output-clock.php | 4 +-
 examples/vertexai/structured-output-math.php | 4 +-
 examples/vertexai/token-metadata.php | 4 +-
 examples/vertexai/toolcall.php | 4 +-
 examples/voyage/embeddings.php | 4 +-
 examples/voyage/multiple-embeddings.php | 4 +-
 src/agent/src/Agent.php | 9 +-
 src/agent/src/Memory/EmbeddingProvider.php | 2 +-
 src/agent/tests/AgentTest.php | 143 ++-
 .../ModelOverrideInputProcessorTest.php | 26 +-
 .../SystemPromptInputProcessorTest.php | 14 +-
 .../tests/Memory/EmbeddingProviderTest.php | 16 +-
 src/ai-bundle/config/options.php | 163 ++-
 src/ai-bundle/config/services.php | 42 +
 src/ai-bundle/src/AiBundle.php | 63 +-
 .../src/Profiler/TraceablePlatform.php | 8 +-
 .../DependencyInjection/AiBundleTest.php | 164 ++-
 .../tests/Profiler/DataCollectorTest.php | 5 +-
 .../src/Bridge/AiMlApi/Completions.php | 121 --
 .../src/Bridge/AiMlApi/Embeddings.php | 16 -
 .../src/Bridge/AiMlApi/ModelCatalog.php | 1082 +++++++++++++++++
 .../src/Bridge/AiMlApi/PlatformFactory.php | 5 +-
 .../src/Bridge/Albert/ModelCatalog.php | 44 +
 .../src/Bridge/Albert/PlatformFactory.php | 1 +
 src/platform/src/Bridge/Anthropic/Claude.php | 11 +-
 .../src/Bridge/Anthropic/ModelCatalog.php | 152 +++
 .../src/Bridge/Anthropic/PlatformFactory.php | 3 +
 .../src/Bridge/Azure/Meta/ModelCatalog.php | 140 +++
 .../src/Bridge/Azure/Meta/PlatformFactory.php | 4 +-
 .../src/Bridge/Azure/OpenAi/ModelCatalog.php | 125 ++
 .../Bridge/Azure/OpenAi/PlatformFactory.php | 3 +
 .../src/Bridge/Bedrock/ModelCatalog.php | 295 +++++
 src/platform/src/Bridge/Bedrock/Nova/Nova.php | 23 -
 .../src/Bridge/Bedrock/PlatformFactory.php | 3 +
 src/platform/src/Bridge/Cerebras/Model.php | 27 -
 .../src/Bridge/Cerebras/ModelCatalog.php | 106 ++
 .../src/Bridge/Cerebras/PlatformFactory.php | 3 +
 .../Bridge/DockerModelRunner/Completions.php | 22 -
 .../Bridge/DockerModelRunner/Embeddings.php | 10 -
 .../Bridge/DockerModelRunner/ModelCatalog.php | 170 +++
 .../DockerModelRunner/PlatformFactory.php | 7 +-
 .../src/Bridge/ElevenLabs/ElevenLabs.php | 35 -
 .../src/Bridge/ElevenLabs/ModelCatalog.php | 128 ++
 .../src/Bridge/ElevenLabs/PlatformFactory.php | 3 +
 src/platform/src/Bridge/Gemini/Embeddings.php | 9 +-
 src/platform/src/Bridge/Gemini/Gemini.php | 27 -
 .../src/Bridge/Gemini/ModelCatalog.php | 140 +++
 .../src/Bridge/Gemini/PlatformFactory.php | 3 +
 .../src/Bridge/HuggingFace/ModelCatalog.php | 23 +
 .../Bridge/HuggingFace/PlatformFactory.php | 1 +
 .../src/Bridge/LmStudio/Completions.php | 14 -
 .../src/Bridge/LmStudio/ModelCatalog.php | 23 +
 .../src/Bridge/LmStudio/PlatformFactory.php | 7 +-
 src/platform/src/Bridge/Meta/Llama.php | 29 -
 .../src/Bridge/Mistral/Embeddings.php | 10 -
 src/platform/src/Bridge/Mistral/Mistral.php | 52 -
 .../src/Bridge/Mistral/ModelCatalog.php | 161 +++
 .../src/Bridge/Mistral/PlatformFactory.php | 3 +
 .../src/Bridge/Ollama/ModelCatalog.php | 213 ++++
 src/platform/src/Bridge/Ollama/Ollama.php | 62 -
 .../src/Bridge/Ollama/PlatformFactory.php | 5 +-
 src/platform/src/Bridge/OpenAi/DallE.php | 14 -
 src/platform/src/Bridge/OpenAi/Embeddings.php | 8 +-
 src/platform/src/Bridge/OpenAi/Gpt.php | 75 +-
 .../src/Bridge/OpenAi/ModelCatalog.php | 262 ++++
 .../src/Bridge/OpenAi/PlatformFactory.php | 3 +
 src/platform/src/Bridge/OpenAi/Whisper.php | 15 -
 .../src/Bridge/OpenRouter/ModelCatalog.php | 23 +
 .../src/Bridge/OpenRouter/PlatformFactory.php | 3 +
 .../src/Bridge/Perplexity/ModelCatalog.php | 87 ++
 .../src/Bridge/Perplexity/Perplexity.php | 26 -
 .../src/Bridge/Perplexity/PlatformFactory.php | 3 +
 .../src/Bridge/Replicate/ModelCatalog.php | 138 +++
 .../src/Bridge/Replicate/PlatformFactory.php | 3 +
 .../src/Bridge/Scaleway/Embeddings.php | 6 +-
 .../src/Bridge/Scaleway/ModelCatalog.php | 147 +++
 .../src/Bridge/Scaleway/PlatformFactory.php | 3 +
 src/platform/src/Bridge/Scaleway/Scaleway.php | 23 +-
 .../Bridge/TransformersPhp/ModelCatalog.php | 23 +
 .../TransformersPhp/PlatformFactory.php | 5 +-
 .../src/Bridge/VertexAi/Embeddings/Model.php | 15 -
 .../src/Bridge/VertexAi/Gemini/Model.php | 26 -
 .../src/Bridge/VertexAi/ModelCatalog.php | 125 ++
 .../src/Bridge/VertexAi/PlatformFactory.php | 3 +
 .../src/Bridge/Voyage/ModelCatalog.php | 69 ++
 .../src/Bridge/Voyage/PlatformFactory.php | 4 +-
 src/platform/src/Bridge/Voyage/Voyage.php | 16 +-
 src/platform/src/InMemoryPlatform.php | 18 +-
 src/platform/src/Model.php | 2 +-
 .../src/ModelCatalog/AbstractModelCatalog.php | 93 ++
 .../src/ModelCatalog/DynamicModelCatalog.php | 39 +
 .../ModelCatalog/ModelCatalogInterface.php | 31 +
 src/platform/src/Platform.php | 10 +-
 src/platform/src/PlatformInterface.php | 10 +-
 .../src/Tests/ModelCatalogTestCase.php | 123 ++
 .../tests/Bridge/AiMlApi/ModelCatalogTest.php | 174 +++
 .../tests/Bridge/Albert/ModelCatalogTest.php | 34 +
 .../tests/Bridge/Anthropic/ClaudeTest.php | 12 +-
 .../AssistantMessageNormalizerTest.php | 2 +-
 .../Bridge/Anthropic/ModelCatalogTest.php | 44 +
 .../Bridge/Anthropic/ModelClientTest.php | 2 +-
 .../Bridge/Azure/Meta/ModelCatalogTest.php | 48 +
 .../OpenAi/EmbeddingsModelClientTest.php | 4 +-
 .../Azure/OpenAi/GptModelClientTest.php | 4 +-
 .../Bridge/Azure/OpenAi/ModelCatalogTest.php | 45 +
 .../Azure/OpenAi/WhisperModelClientTest.php | 8 +-
 .../tests/Bridge/Bedrock/ModelCatalogTest.php | 39 +
 .../Bridge/Bedrock/Nova/ContractTest.php | 2 +-
 .../Bedrock/Nova/NovaResultConverterTest.php | 4 +-
 .../Bridge/Cerebras/ModelCatalogTest.php | 42 +
 .../tests/Bridge/Cerebras/ModelClientTest.php | 4 +-
 .../Bridge/Cerebras/ResultConverterTest.php | 2 +-
 .../DockerModelRunner/ModelCatalogTest.php | 57 +
 .../Contract/ElevenLabsContractTest.php | 2 +-
 .../ElevenLabs/ElevenLabsClientTest.php | 29 +-
 .../ElevenLabs/ElevenLabsConverterTest.php | 2 +-
 .../Bridge/ElevenLabs/ElevenLabsTest.php | 135 --
 .../Bridge/ElevenLabs/ModelCatalogTest.php | 45 +
 .../AssistantMessageNormalizerTest.php | 2 +-
 .../Contract/MessageBagNormalizerTest.php | 2 +-
 .../ToolCallMessageNormalizerTest.php | 2 +-
 .../Gemini/Contract/ToolNormalizerTest.php | 2 +-
 .../Contract/UserMessageNormalizerTest.php | 2 +-
 .../Gemini/Embeddings/ModelClientTest.php | 2 +-
 .../tests/Bridge/Gemini/ModelCatalogTest.php | 45 +
 .../Contract/DocumentNormalizerTest.php | 2 +-
 .../Contract/DocumentUrlNormalizerTest.php | 2 +-
 .../tests/Bridge/Mistral/ModelCatalogTest.php | 47 +
 .../AssistantMessageNormalizerTest.php | 2 +-
 .../tests/Bridge/Ollama/ModelCatalogTest.php | 54 +
 .../tests/Bridge/Ollama/OllamaClientTest.php | 6 +-
 .../Ollama/OllamaResultConverterTest.php | 2 +-
 .../tests/Bridge/Ollama/OllamaTest.php | 104 --
 .../Contract/DocumentNormalizerTest.php | 2 +-
 .../Bridge/OpenAi/DallE/ModelClientTest.php | 6 +-
 .../tests/Bridge/OpenAi/DallETest.php | 8 +-
 .../OpenAi/Embeddings/ModelClientTest.php | 10 +-
 .../tests/Bridge/OpenAi/EmbeddingsTest.php | 12 +-
 .../Bridge/OpenAi/Gpt/ModelClientTest.php | 8 +-
 src/platform/tests/Bridge/OpenAi/GptTest.php | 8 +-
 .../tests/Bridge/OpenAi/ModelCatalogTest.php | 68 ++
 .../Bridge/OpenAi/Whisper/ModelClientTest.php | 12 +-
 .../tests/Bridge/OpenAi/WhisperTest.php | 8 +-
 .../Contract/FileUrlNormalizerTest.php | 2 +-
 .../Bridge/Perplexity/ModelCatalogTest.php | 38 +
 .../Bridge/Perplexity/ModelClientTest.php | 6 +-
 .../Bridge/Perplexity/PerplexityTest.php | 8 +-
 .../Bridge/Replicate/ModelCatalogTest.php | 48 +
 .../Scaleway/Embeddings/ModelClientTest.php | 8 +-
 .../tests/Bridge/Scaleway/EmbeddingsTest.php | 4 +-
 .../Bridge/Scaleway/Llm/ModelClientTest.php | 8 +-
 .../Bridge/Scaleway/ModelCatalogTest.php | 49 +
 .../TransformersPhp/ModelCatalogTest.php | 38 +
 .../AssistantMessageNormalizerTest.php | 2 +-
 .../Contract/MessageBagNormalizerTest.php | 2 +-
 .../ToolCallMessageNormalizerTest.php | 2 +-
 .../VertexAi/Contract/ToolNormalizerTest.php | 2 +-
 .../Contract/UserMessageNormalizerTest.php | 2 +-
 .../VertexAi/Embeddings/ModelClientTest.php | 2 +-
 .../VertexAi/Gemini/ModelClientTest.php | 4 +-
 .../Bridge/VertexAi/ModelCatalogTest.php | 45 +
 .../tests/Bridge/Voyage/ModelCatalogTest.php | 40 +
 .../Bridge/Voyage/ResultConverterTest.php | 20 +-
 .../Message/MessageBagNormalizerTest.php | 4 +-
 src/platform/tests/ContractTest.php | 16 +-
 .../tests/DynamicModelCatalogTest.php | 66 +
 src/platform/tests/InMemoryPlatformTest.php | 6 +-
 src/store/src/Document/Vectorizer.php | 5 +-
 src/store/tests/Document/VectorizerTest.php | 202 +--
 .../tests/Double/PlatformTestHandler.php | 6 +-
 src/store/tests/IndexerTest.php | 47 +-
 336 files changed, 5975 insertions(+), 2099 deletions(-)
 create mode 100644 src/platform/src/Bridge/AiMlApi/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Albert/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Anthropic/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Azure/Meta/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Azure/OpenAi/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Bedrock/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Cerebras/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/DockerModelRunner/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/ElevenLabs/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Gemini/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/HuggingFace/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/LmStudio/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Mistral/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Ollama/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/OpenAi/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/OpenRouter/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Perplexity/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Replicate/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Scaleway/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/TransformersPhp/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/VertexAi/ModelCatalog.php
 create mode 100644 src/platform/src/Bridge/Voyage/ModelCatalog.php
 create mode 100644 src/platform/src/ModelCatalog/AbstractModelCatalog.php
 create mode 100644 src/platform/src/ModelCatalog/DynamicModelCatalog.php
 create mode 100644 src/platform/src/ModelCatalog/ModelCatalogInterface.php
 create mode 100644 src/platform/src/Tests/ModelCatalogTestCase.php
 create mode 100644 src/platform/tests/Bridge/AiMlApi/ModelCatalogTest.php
 create mode 100644 src/platform/tests/Bridge/Albert/ModelCatalogTest.php
 create mode 100644 src/platform/tests/Bridge/Anthropic/ModelCatalogTest.php
 create mode 100644 src/platform/tests/Bridge/Azure/Meta/ModelCatalogTest.php
 create mode 100644 src/platform/tests/Bridge/Azure/OpenAi/ModelCatalogTest.php
 create mode 100644
src/platform/tests/Bridge/Bedrock/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Cerebras/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/DockerModelRunner/ModelCatalogTest.php delete mode 100644 src/platform/tests/Bridge/ElevenLabs/ElevenLabsTest.php create mode 100644 src/platform/tests/Bridge/ElevenLabs/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Gemini/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Mistral/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Ollama/ModelCatalogTest.php delete mode 100644 src/platform/tests/Bridge/Ollama/OllamaTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Perplexity/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Replicate/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Scaleway/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/TransformersPhp/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/VertexAi/ModelCatalogTest.php create mode 100644 src/platform/tests/Bridge/Voyage/ModelCatalogTest.php create mode 100644 src/platform/tests/DynamicModelCatalogTest.php diff --git a/demo/config/packages/ai.yaml b/demo/config/packages/ai.yaml index 494bb029e..09a2bd1e7 100644 --- a/demo/config/packages/ai.yaml +++ b/demo/config/packages/ai.yaml @@ -4,9 +4,7 @@ ai: api_key: '%env(OPENAI_API_KEY)%' agent: blog: - model: - class: 'Symfony\AI\Platform\Bridge\OpenAi\Gpt' - name: !php/const Symfony\AI\Platform\Bridge\OpenAi\Gpt::GPT_4O_MINI + model: 'gpt-4o-mini' tools: - 'Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch' - service: 'clock' @@ -14,9 +12,7 @@ ai: description: 'Provides the current date and time.' method: 'now' stream: - model: - class: 'Symfony\AI\Platform\Bridge\OpenAi\Gpt' - name: !php/const Symfony\AI\Platform\Bridge\OpenAi\Gpt::GPT_4O_MINI + model: 'gpt-4o-mini' prompt: | You are an example chat application where messages from the LLM are streamed to the user using Server-Sent Events via `symfony/ux-turbo` / Turbo Streams. This example does not use any custom @@ -24,14 +20,11 @@ ai: Whatever the user asks, tell them about the application & used technologies. tools: false youtube: - model: - class: 'Symfony\AI\Platform\Bridge\OpenAi\Gpt' - name: !php/const Symfony\AI\Platform\Bridge\OpenAi\Gpt::GPT_4O_MINI + model: 'gpt-4o-mini' tools: false wikipedia: model: - class: 'Symfony\AI\Platform\Bridge\OpenAi\Gpt' - name: !php/const Symfony\AI\Platform\Bridge\OpenAi\Gpt::GPT_4O_MINI + name: 'gpt-4o-mini' options: temperature: 0.5 prompt: @@ -40,9 +33,7 @@ ai: tools: - 'Symfony\AI\Agent\Toolbox\Tool\Wikipedia' audio: - model: - class: 'Symfony\AI\Platform\Bridge\OpenAi\Gpt' - name: 'gpt-4o-mini?temperature=1.0' + model: 'gpt-4o-mini?temperature=1.0' prompt: 'You are a friendly chatbot that likes to have a conversation with users and asks them some questions.' tools: # Agent in agent 🤯 @@ -55,9 +46,7 @@ ai: collection: 'symfony_blog' vectorizer: openai: - model: - class: 'Symfony\AI\Platform\Bridge\OpenAi\Embeddings' - name: !php/const Symfony\AI\Platform\Bridge\OpenAi\Embeddings::TEXT_ADA_002 + model: 'text-embedding-ada-002' indexer: blog: loader: 'Symfony\AI\Store\Document\Loader\RssFeedLoader' diff --git a/examples/aimlapi/chat.php b/examples/aimlapi/chat.php index 0f385fb04..9265f2eaa 100644 --- a/examples/aimlapi/chat.php +++ b/examples/aimlapi/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\AiMlApi\Completions; use Symfony\AI\Platform\Bridge\AiMlApi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,13 +16,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('AIMLAPI_API_KEY'), http_client()); -$model = new Completions(Completions::GEMINI_2_0_FLASH); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('gemini-2.0-flash', $messages, [ 'max_tokens' => 500, // specific options just for this call ]); diff --git a/examples/aimlapi/image-input-binary.php b/examples/aimlapi/image-input-binary.php index 84e4314c2..7362681e2 100644 --- a/examples/aimlapi/image-input-binary.php +++ b/examples/aimlapi/image-input-binary.php @@ -9,9 +9,7 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\AiMlApi\Completions; use Symfony\AI\Platform\Bridge\AiMlApi\PlatformFactory; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Message\Content\Image; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -19,10 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('AIMLAPI_API_KEY'), http_client()); -$model = new Completions( - name: Completions::GOOGLE_GEMMA_3_27B_IT, - capabilities: [...Completions::DEFAULT_CAPABILITIES, Capability::INPUT_IMAGE] -); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -31,6 +25,6 @@ Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('google/gemma-3-27b-it', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/aimlapi/toolcall.php b/examples/aimlapi/toolcall.php index 5895d18dd..65f96cb61 100644 --- a/examples/aimlapi/toolcall.php +++ b/examples/aimlapi/toolcall.php @@ -13,24 +13,18 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Wikipedia; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\AiMlApi\Completions; use Symfony\AI\Platform\Bridge\AiMlApi\PlatformFactory; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('AIMLAPI_API_KEY'), http_client()); -$model = new Completions( - name: Completions::GOOGLE_GEMINI_2_5_FLASH, - capabilities: [...Completions::DEFAULT_CAPABILITIES, Capability::TOOL_CALLING] -); $wikipedia = new Wikipedia(http_client()); $toolbox = new Toolbox([$wikipedia], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'google/gemini-2.5-flash', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Who is the current chancellor of Germany?')); $result = $agent->call($messages); diff --git a/examples/aimlapi/vectorizing.php b/examples/aimlapi/vectorizing.php index 61c91a278..f693428bf 100644 --- a/examples/aimlapi/vectorizing.php +++ b/examples/aimlapi/vectorizing.php @@ -9,18 +9,14 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\AiMlApi\Embeddings; use Symfony\AI\Platform\Bridge\AiMlApi\PlatformFactory; use Symfony\AI\Store\Document\Vectorizer; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('AIMLAPI_API_KEY'), http_client()); -$embeddings = new Embeddings( - name: Embeddings::TEXT_EMBEDDING_3_SMALL -); -$vectorizer = new Vectorizer($platform, $embeddings); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $string = 'Hello World'; $vector = $vectorizer->vectorize($string); diff --git a/examples/albert/chat.php b/examples/albert/chat.php index 31d30a36d..9a30ed73e 100644 --- a/examples/albert/chat.php +++ b/examples/albert/chat.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Platform\Bridge\Albert\PlatformFactory; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -18,8 +17,6 @@ $platform = PlatformFactory::create(env('ALBERT_API_KEY'), env('ALBERT_API_URL'), http_client()); -$model = new Gpt('gpt-4o'); - $documentContext = <<<'CONTEXT' Document: AI Strategy of France @@ -42,6 +39,6 @@ Message::ofUser('What are the main objectives of France\'s AI strategy?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('llama-3.3-70b-instruct', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/anthropic/chat.php b/examples/anthropic/chat.php index 84e2e9bee..979d25fdf 100644 --- a/examples/anthropic/chat.php +++ b/examples/anthropic/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,12 +16,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('claude-3-5-sonnet-20241022', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/anthropic/image-input-binary.php b/examples/anthropic/image-input-binary.php index e0a519c03..2ffb2bb36 100644 --- a/examples/anthropic/image-input-binary.php +++ b/examples/anthropic/image-input-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Content\Image; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -27,6 +25,6 @@ 'Describe this image.', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('claude-3-5-sonnet-20241022', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/anthropic/image-input-url.php b/examples/anthropic/image-input-url.php index 471992d0d..f08fa83bb 100644 --- a/examples/anthropic/image-input-url.php +++ b/examples/anthropic/image-input-url.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Content\ImageUrl; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -27,6 +25,6 @@ 'Describe this image.', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('claude-3-5-sonnet-20241022', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/anthropic/pdf-input-binary.php b/examples/anthropic/pdf-input-binary.php index 1364bbeae..b93975906 100644 --- a/examples/anthropic/pdf-input-binary.php +++ b/examples/anthropic/pdf-input-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Content\Document; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ 'What is this document about?', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('claude-3-5-sonnet-20241022', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/anthropic/pdf-input-url.php b/examples/anthropic/pdf-input-url.php index e96babe31..9276a1edc 100644 --- a/examples/anthropic/pdf-input-url.php +++ b/examples/anthropic/pdf-input-url.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Content\DocumentUrl; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ 'What is this document about?', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('claude-3-5-sonnet-20241022', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/anthropic/stream.php b/examples/anthropic/stream.php index 8f7a89e73..65035a109 100644 --- a/examples/anthropic/stream.php +++ b/examples/anthropic/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,12 +16,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $messages = new MessageBag( Message::forSystem('You are a thoughtful philosopher.'), Message::ofUser('What is the purpose of an ant?'), ); -$result = $platform->invoke($model, $messages, ['stream' => true]); +$result = $platform->invoke('claude-3-5-sonnet-20241022', $messages, ['stream' => true]); print_stream($result); diff --git a/examples/anthropic/token-metadata.php b/examples/anthropic/token-metadata.php index cf1c37c43..08e074719 100644 --- a/examples/anthropic/token-metadata.php +++ b/examples/anthropic/token-metadata.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Bridge\Anthropic\TokenOutputProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,9 +18,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), http_client()); -$model = new Claude(Claude::SONNET_37); -$agent = new Agent($platform, $model, outputProcessors: [new TokenOutputProcessor()], logger: logger()); +$agent = new Agent($platform, 'claude-3-5-sonnet-20241022', outputProcessors: [new TokenOutputProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), diff --git a/examples/anthropic/toolcall.php b/examples/anthropic/toolcall.php index 9fee56792..a3b500119 100644 --- a/examples/anthropic/toolcall.php +++ b/examples/anthropic/toolcall.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Wikipedia; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,12 +20,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('ANTHROPIC_API_KEY'), httpClient: http_client()); -$model = new Claude(Claude::SONNET_37); $wikipedia = new Wikipedia(http_client()); $toolbox = new Toolbox([$wikipedia], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new 
Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'claude-3-5-sonnet-20241022', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Who is the current chancellor of Germany?')); $result = $agent->call($messages); diff --git a/examples/azure/audio-transcript.php b/examples/azure/audio-transcript.php index 9d8f2b5d3..3f96ad5a6 100644 --- a/examples/azure/audio-transcript.php +++ b/examples/azure/audio-transcript.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Platform\Bridge\Azure\OpenAi\PlatformFactory; -use Symfony\AI\Platform\Bridge\OpenAi\Whisper; use Symfony\AI\Platform\Message\Content\Audio; require_once dirname(__DIR__).'/bootstrap.php'; @@ -22,9 +21,8 @@ env('AZURE_OPENAI_KEY'), http_client(), ); -$model = new Whisper(Whisper::WHISPER_1); $file = Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'); -$result = $platform->invoke($model, $file); +$result = $platform->invoke('whisper-1', $file); echo $result->asText().\PHP_EOL; diff --git a/examples/azure/chat-gpt.php b/examples/azure/chat-gpt.php index 6f68968a2..2ee4b4413 100644 --- a/examples/azure/chat-gpt.php +++ b/examples/azure/chat-gpt.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Platform\Bridge\Azure\OpenAi\PlatformFactory; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,12 +22,10 @@ env('AZURE_OPENAI_KEY'), http_client(), ); -$model = new Gpt(Gpt::GPT_4O_MINI); - $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gpt-4o-mini', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/azure/chat-llama.php b/examples/azure/chat-llama.php index bc4631eb4..a8c34c39a 100644 --- a/examples/azure/chat-llama.php +++ b/examples/azure/chat-llama.php @@ -10,17 +10,15 @@ */ use Symfony\AI\Platform\Bridge\Azure\Meta\PlatformFactory; -use Symfony\AI\Platform\Bridge\Meta\Llama; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('AZURE_LLAMA_BASEURL'), env('AZURE_LLAMA_KEY'), http_client()); -$model = new Llama(Llama::V3_3_70B_INSTRUCT); $messages = new MessageBag(Message::ofUser('I am going to Paris, what should I see?')); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('llama-3.3-70B-Instruct', $messages, [ 'max_tokens' => 2048, 'temperature' => 0.8, 'top_p' => 0.1, diff --git a/examples/azure/embeddings.php b/examples/azure/embeddings.php index 3f4195d63..24db6e2be 100644 --- a/examples/azure/embeddings.php +++ b/examples/azure/embeddings.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Platform\Bridge\Azure\OpenAi\PlatformFactory; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; require_once dirname(__DIR__).'/bootstrap.php'; @@ -21,9 +20,8 @@ env('AZURE_OPENAI_KEY'), http_client(), ); -$embeddings = new Embeddings(Embeddings::TEXT_3_SMALL); -$result = $platform->invoke($embeddings, <<invoke('text-embedding-3-small', <<invoke($model, $messages); +$result = $platform->invoke('claude-3-7-sonnet-20250219', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/bedrock/chat-llama.php b/examples/bedrock/chat-llama.php index bbc7406f0..4ebceda3f 100644 --- a/examples/bedrock/chat-llama.php +++ 
b/examples/bedrock/chat-llama.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory; -use Symfony\AI\Platform\Bridge\Meta\Llama; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,12 +22,11 @@ } $platform = PlatformFactory::create(); -$model = new Llama(Llama::V3_2_3B_INSTRUCT); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('llama-3.2-3b-instruct', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/bedrock/chat-nova.php b/examples/bedrock/chat-nova.php index 4a4c6ea25..f5db0923e 100644 --- a/examples/bedrock/chat-nova.php +++ b/examples/bedrock/chat-nova.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Bedrock\Nova\Nova; use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,12 +22,11 @@ } $platform = PlatformFactory::create(); -$model = new Nova(Nova::PRO); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('nova-pro', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/bedrock/image-claude-binary.php b/examples/bedrock/image-claude-binary.php index 9d3fa9df9..85fe32f9c 100644 --- a/examples/bedrock/image-claude-binary.php +++ b/examples/bedrock/image-claude-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory; use Symfony\AI\Platform\Message\Content\Image; use Symfony\AI\Platform\Message\Message; @@ -24,7 +23,6 @@ } $platform = PlatformFactory::create(); -$model = new Claude('claude-3-7-sonnet-20250219'); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -33,6 +31,6 @@ Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('claude-3-7-sonnet-20250219', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/bedrock/image-nova.php b/examples/bedrock/image-nova.php index 328d5dccf..253a2afc4 100644 --- a/examples/bedrock/image-nova.php +++ b/examples/bedrock/image-nova.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Bedrock\Nova\Nova; use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory; use Symfony\AI\Platform\Message\Content\Image; use Symfony\AI\Platform\Message\Message; @@ -24,7 +23,6 @@ } $platform = PlatformFactory::create(); -$model = new Nova(Nova::PRO); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -33,6 +31,6 @@ Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('nova-pro', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/bedrock/toolcall-claude.php b/examples/bedrock/toolcall-claude.php index 16b23403b..62a5f1c18 100644 --- a/examples/bedrock/toolcall-claude.php +++ b/examples/bedrock/toolcall-claude.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Wikipedia; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Anthropic\Claude; use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -27,12 +26,11 @@ } $platform = PlatformFactory::create(); -$model = new Claude('claude-3-7-sonnet-20250219'); $wikipedia = new Wikipedia(http_client()); $toolbox = new Toolbox([$wikipedia]); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'claude-3-7-sonnet-20250219', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Who is the current chancellor of Germany?')); $result = $agent->call($messages); diff --git a/examples/bedrock/toolcall-nova.php b/examples/bedrock/toolcall-nova.php index 0dbc25a00..6ce64fe14 100644 --- a/examples/bedrock/toolcall-nova.php +++ b/examples/bedrock/toolcall-nova.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Wikipedia; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Bedrock\Nova\Nova; use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -27,12 +26,11 @@ } $platform = PlatformFactory::create(); -$model = new Nova(Nova::PRO); $wikipedia = new Wikipedia(http_client()); $toolbox = new Toolbox([$wikipedia]); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'nova-pro', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::ofUser('Who is the current chancellor of Germany? Use Wikipedia to find the answer.') diff --git a/examples/cerebras/chat.php b/examples/cerebras/chat.php index 50d6bb58c..a6ccfa797 100644 --- a/examples/cerebras/chat.php +++ b/examples/cerebras/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Cerebras\Model; use Symfony\AI\Platform\Bridge\Cerebras\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -22,6 +21,6 @@ Message::forSystem('You are a helpful assistant.'), Message::ofUser('What is the capital of Japan?'), ); -$result = $platform->invoke(new Model(Model::LLAMA3_1_8B), $messages); +$result = $platform->invoke('llama3.1-8b', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/cerebras/stream.php b/examples/cerebras/stream.php index 87aebeadd..7e5f4995b 100644 --- a/examples/cerebras/stream.php +++ b/examples/cerebras/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Cerebras\Model; use Symfony\AI\Platform\Bridge\Cerebras\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,7 +22,7 @@ Message::ofUser('What are the top three destinations in France?'), ); -$result = $platform->invoke(new Model(Model::LLAMA3_1_8B), $messages, [ +$result = $platform->invoke('llama3.1-8b', $messages, [ 'stream' => true, ]); diff --git a/examples/dockermodelrunner/chat.php b/examples/dockermodelrunner/chat.php index 48126f53f..7189f984d 100644 --- a/examples/dockermodelrunner/chat.php +++ b/examples/dockermodelrunner/chat.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\DockerModelRunner\Completions; use Symfony\AI\Platform\Bridge\DockerModelRunner\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -18,9 +17,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('DOCKER_MODEL_RUNNER_HOST_URL'), http_client()); -$model = new Completions(Completions::GEMMA_3_N); -$agent = new Agent($platform, $model, logger: logger()); +$agent = new Agent($platform, 'ai/gemma3n', logger: logger()); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), diff --git a/examples/dockermodelrunner/embeddings.php b/examples/dockermodelrunner/embeddings.php index 23dce0573..8e1404867 100644 --- a/examples/dockermodelrunner/embeddings.php +++ b/examples/dockermodelrunner/embeddings.php @@ -9,13 +9,12 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\DockerModelRunner\Embeddings; use Symfony\AI\Platform\Bridge\DockerModelRunner\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('DOCKER_MODEL_RUNNER_HOST_URL'), http_client()); -$response = $platform->invoke(new Embeddings(Embeddings::NOMIC_EMBED_TEXT), <<invoke('ai/nomic-embed-text-v1.5', <<call($messages); diff --git a/examples/document/vectorizing-text-documents.php b/examples/document/vectorizing-text-documents.php index 425c6bc3a..1106222b6 100644 --- a/examples/document/vectorizing-text-documents.php +++ b/examples/document/vectorizing-text-documents.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Store\Document\TextDocument; use Symfony\AI\Store\Document\VectorDocument; @@ -19,7 +18,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$embeddings = new Embeddings(Embeddings::TEXT_3_LARGE); $textDocuments = [ new TextDocument(Uuid::v4(), 'Hello World'), @@ -27,7 +25,7 @@ new TextDocument(Uuid::v4(), 'PHP Hypertext Preprocessor'), ]; -$vectorizer = new Vectorizer($platform, $embeddings); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-large'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($textDocuments); dump(array_map(fn (VectorDocument $document) => $document->vector->getDimensions(), $vectorDocuments)); diff --git a/examples/document/vectorizing.php b/examples/document/vectorizing.php index 8d70baaa4..4b01b2c01 100644 --- a/examples/document/vectorizing.php +++ b/examples/document/vectorizing.php @@ -9,16 +9,14 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Store\Document\Vectorizer; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$embeddings = new Embeddings(Embeddings::TEXT_3_LARGE); -$vectorizer = new Vectorizer($platform, $embeddings); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-large'); $string = 'Hello World'; $vector = $vectorizer->vectorize($string); diff --git a/examples/elevenlabs/speech-to-text.php b/examples/elevenlabs/speech-to-text.php index 327b35757..2490c9da3 100644 --- a/examples/elevenlabs/speech-to-text.php +++ b/examples/elevenlabs/speech-to-text.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabs; use Symfony\AI\Platform\Bridge\ElevenLabs\PlatformFactory; use Symfony\AI\Platform\Message\Content\Audio; @@ -19,8 +18,7 @@ apiKey: env('ELEVEN_LABS_API_KEY'), httpClient: http_client() ); -$model = new ElevenLabs(ElevenLabs::SCRIBE_V1); -$result = $platform->invoke($model, Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3')); +$result = $platform->invoke('scribe_v1', Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3')); echo $result->asText().\PHP_EOL; diff --git a/examples/elevenlabs/text-to-speech-as-stream.php b/examples/elevenlabs/text-to-speech-as-stream.php index f16b9f5e2..27e789710 100644 --- a/examples/elevenlabs/text-to-speech-as-stream.php +++ b/examples/elevenlabs/text-to-speech-as-stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabs; use Symfony\AI\Platform\Bridge\ElevenLabs\PlatformFactory; use Symfony\AI\Platform\Message\Content\Text; @@ -19,13 +18,12 @@ apiKey: env('ELEVEN_LABS_API_KEY'), httpClient: http_client(), ); -$model = new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2, [ + +$result = $platform->invoke('eleven_multilingual_v2', new Text('The first move is what sets everything in motion.'), [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', // Brad (https://elevenlabs.io/app/voice-library?voiceId=Dslrhjl3ZpzrctukrQSN) 'stream' => true, ]); -$result = $platform->invoke($model, new Text('The first move is what sets everything in motion.')); - $content = ''; foreach ($result->asStream() as $chunk) { diff --git a/examples/elevenlabs/text-to-speech.php b/examples/elevenlabs/text-to-speech.php index 8e2a80e73..da8994194 100644 --- a/examples/elevenlabs/text-to-speech.php +++ b/examples/elevenlabs/text-to-speech.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabs; use Symfony\AI\Platform\Bridge\ElevenLabs\PlatformFactory; use Symfony\AI\Platform\Message\Content\Text; @@ -19,10 +18,9 @@ apiKey: env('ELEVEN_LABS_API_KEY'), httpClient: http_client(), ); -$model = new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2, [ + +$result = $platform->invoke('eleven_multilingual_v2', new Text('Hello world'), [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', // Brad (https://elevenlabs.io/app/voice-library?voiceId=Dslrhjl3ZpzrctukrQSN) ]); -$result = $platform->invoke($model, new Text('Hello world')); - echo $result->asBinary().\PHP_EOL; diff --git a/examples/gemini/audio-input.php b/examples/gemini/audio-input.php index a37c1a29f..9a975936f 100644 --- a/examples/gemini/audio-input.php +++ b/examples/gemini/audio-input.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Content\Audio; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_1_5_FLASH); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-1.5-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/gemini/chat.php b/examples/gemini/chat.php index 27d495be5..b1e8aefba 100644 --- a/examples/gemini/chat.php +++ b/examples/gemini/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,12 +16,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_2_FLASH); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-2.0-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/gemini/embeddings.php b/examples/gemini/embeddings.php index f5295efaf..7dbe4c797 100644 --- a/examples/gemini/embeddings.php +++ b/examples/gemini/embeddings.php @@ -9,15 +9,13 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Gemini\Embeddings; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$embeddings = new Embeddings(Embeddings::GEMINI_EMBEDDING_EXP_03_07); -$result = $platform->invoke($embeddings, <<invoke('gemini-embedding-exp-03-07', <<invoke($model, $messages); +$result = $platform->invoke('gemini-1.5-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/gemini/pdf-input-binary.php b/examples/gemini/pdf-input-binary.php index eecbf5aed..316fef388 100644 --- a/examples/gemini/pdf-input-binary.php +++ b/examples/gemini/pdf-input-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Content\Document; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_1_5_FLASH); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ 'What is this document about?', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-1.5-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/gemini/server-tools.php b/examples/gemini/server-tools.php index 1bd27da41..16bffc736 100644 --- a/examples/gemini/server-tools.php +++ b/examples/gemini/server-tools.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -22,12 +21,9 @@ $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -// Available server-side tools as of 2025-06-28: url_context, google_search, code_execution -$llm = new Gemini('gemini-2.5-pro-preview-03-25', ['server_tools' => ['url_context' => true], 'temperature' => 1.0]); - $toolbox = new Toolbox([new Clock()], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $llm, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gemini-2.5-pro-preview-03-25', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::ofUser( @@ 
-37,6 +33,9 @@ ), ); -$result = $agent->call($messages); +$result = $agent->call($messages, [ + 'server_tools' => ['url_context' => true], + 'temperature' => 1.0, +]); echo $result->getContent().\PHP_EOL; diff --git a/examples/gemini/stream.php b/examples/gemini/stream.php index 6f7633223..18dd1cd2e 100644 --- a/examples/gemini/stream.php +++ b/examples/gemini/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,13 +16,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_2_FLASH); $messages = new MessageBag( Message::forSystem('You are a funny clown that entertains people.'), Message::ofUser('What is the purpose of an ant?'), ); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('gemini-2.0-flash', $messages, [ 'stream' => true, // enable streaming of response text ]); diff --git a/examples/gemini/structured-output-clock.php b/examples/gemini/structured-output-clock.php index b25a2fd2c..bed2d07bb 100644 --- a/examples/gemini/structured-output-clock.php +++ b/examples/gemini/structured-output-clock.php @@ -14,7 +14,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor as ToolProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,13 +22,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_1_5_FLASH); $clock = new Clock(new SymfonyClock()); $toolbox = new Toolbox([$clock]); $toolProcessor = new ToolProcessor($toolbox); $structuredOutputProcessor = new StructuredOutputProcessor(); -$agent = new Agent($platform, $model, [$toolProcessor, $structuredOutputProcessor], [$toolProcessor, $structuredOutputProcessor], logger: logger()); +$agent = new Agent($platform, 'gemini-1.5-flash', [$toolProcessor, $structuredOutputProcessor], [$toolProcessor, $structuredOutputProcessor], logger: logger()); $messages = new MessageBag(Message::ofUser('What date and time is it?')); $result = $agent->call($messages, ['response_format' => [ diff --git a/examples/gemini/structured-output-math.php b/examples/gemini/structured-output-math.php index 8e77e8328..a2ef4095d 100644 --- a/examples/gemini/structured-output-math.php +++ b/examples/gemini/structured-output-math.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\MathReasoning; -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,10 +19,9 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_1_5_FLASH); $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gemini-1.5-flash', [$processor], [$processor], logger: logger()); $messages = new MessageBag( 
Message::forSystem('You are a helpful math tutor. Guide the user through the solution step by step.'), Message::ofUser('how can I solve 8x + 7 = -23'), diff --git a/examples/gemini/token-metadata.php b/examples/gemini/token-metadata.php index 9bddd429c..08f4f6e99 100644 --- a/examples/gemini/token-metadata.php +++ b/examples/gemini/token-metadata.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Bridge\Gemini\TokenOutputProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,9 +18,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_2_FLASH); -$agent = new Agent($platform, $model, outputProcessors: [new TokenOutputProcessor()], logger: logger()); +$agent = new Agent($platform, 'gemini-2.0-flash', outputProcessors: [new TokenOutputProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), diff --git a/examples/gemini/toolcall.php b/examples/gemini/toolcall.php index 447926dea..598ee53c4 100644 --- a/examples/gemini/toolcall.php +++ b/examples/gemini/toolcall.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,11 +20,10 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$model = new Gemini(Gemini::GEMINI_2_FLASH); $toolbox = new Toolbox([new Clock()], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gemini-2.0-flash', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What time is it?')); $result = $agent->call($messages); diff --git a/examples/huggingface/audio-classification.php b/examples/huggingface/audio-classification.php index bdbf3d93a..de6ba7c98 100644 --- a/examples/huggingface/audio-classification.php +++ b/examples/huggingface/audio-classification.php @@ -12,15 +12,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; use Symfony\AI\Platform\Message\Content\Audio; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('MIT/ast-finetuned-audioset-10-10-0.4593'); $audio = Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'); -$result = $platform->invoke($model, $audio, [ +$result = $platform->invoke('MIT/ast-finetuned-audioset-10-10-0.4593', $audio, [ 'task' => Task::AUDIO_CLASSIFICATION, ]); diff --git a/examples/huggingface/automatic-speech-recognition.php b/examples/huggingface/automatic-speech-recognition.php index 73e03a0c5..5954e9f01 100644 --- a/examples/huggingface/automatic-speech-recognition.php +++ b/examples/huggingface/automatic-speech-recognition.php @@ -12,15 +12,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; use 
Symfony\AI\Platform\Message\Content\Audio; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('openai/whisper-large-v3'); $audio = Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'); -$result = $platform->invoke($model, $audio, [ +$result = $platform->invoke('openai/whisper-large-v3', $audio, [ 'task' => Task::AUTOMATIC_SPEECH_RECOGNITION, ]); diff --git a/examples/huggingface/chat-completion.php b/examples/huggingface/chat-completion.php index bfeb263eb..a45e4a8ce 100644 --- a/examples/huggingface/chat-completion.php +++ b/examples/huggingface/chat-completion.php @@ -13,15 +13,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\Task; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('HuggingFaceH4/zephyr-7b-beta'); $messages = new MessageBag(Message::ofUser('Hello, how are you doing today?')); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('HuggingFaceH4/zephyr-7b-beta', $messages, [ 'task' => Task::CHAT_COMPLETION, ]); diff --git a/examples/huggingface/feature-extraction.php b/examples/huggingface/feature-extraction.php index 5fd721c33..db739e62c 100644 --- a/examples/huggingface/feature-extraction.php +++ b/examples/huggingface/feature-extraction.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('thenlper/gte-large'); -$result = $platform->invoke($model, 'Today is a sunny day and I will get some ice cream.', [ +$result = $platform->invoke('thenlper/gte-large', 'Today is a sunny day and I will get some ice cream.', [ 'task' => Task::FEATURE_EXTRACTION, ]); diff --git a/examples/huggingface/fill-mask.php b/examples/huggingface/fill-mask.php index 2e101c75b..2d565058c 100644 --- a/examples/huggingface/fill-mask.php +++ b/examples/huggingface/fill-mask.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('FacebookAI/xlm-roberta-base'); -$result = $platform->invoke($model, 'Hello I\'m a model.', [ +$result = $platform->invoke('FacebookAI/xlm-roberta-base', 'Hello I\'m a model.', [ 'task' => Task::FILL_MASK, ]); diff --git a/examples/huggingface/image-classification.php b/examples/huggingface/image-classification.php index bb15ebf52..44d98fa1d 100644 --- a/examples/huggingface/image-classification.php +++ b/examples/huggingface/image-classification.php @@ -12,15 +12,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; use Symfony\AI\Platform\Message\Content\Image; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('google/vit-base-patch16-224'); $image = 
Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'); -$result = $platform->invoke($model, $image, [ +$result = $platform->invoke('google/vit-base-patch16-224', $image, [ 'task' => Task::IMAGE_CLASSIFICATION, ]); diff --git a/examples/huggingface/image-segmentation.php b/examples/huggingface/image-segmentation.php index 5c17f2919..86932b980 100644 --- a/examples/huggingface/image-segmentation.php +++ b/examples/huggingface/image-segmentation.php @@ -12,15 +12,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; use Symfony\AI\Platform\Message\Content\Image; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('nvidia/segformer-b0-finetuned-ade-512-512'); $image = Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'); -$result = $platform->invoke($model, $image, [ +$result = $platform->invoke('nvidia/segformer-b0-finetuned-ade-512-512', $image, [ 'task' => Task::IMAGE_SEGMENTATION, ]); diff --git a/examples/huggingface/image-to-text.php b/examples/huggingface/image-to-text.php index ec3f9763c..9cf54e965 100644 --- a/examples/huggingface/image-to-text.php +++ b/examples/huggingface/image-to-text.php @@ -12,15 +12,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; use Symfony\AI\Platform\Message\Content\Image; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('Salesforce/blip-image-captioning-base'); $image = Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'); -$result = $platform->invoke($model, $image, [ +$result = $platform->invoke('Salesforce/blip-image-captioning-base', $image, [ 'task' => Task::IMAGE_TO_TEXT, ]); diff --git a/examples/huggingface/object-detection.php b/examples/huggingface/object-detection.php index d0b367e6e..86512ef32 100644 --- a/examples/huggingface/object-detection.php +++ b/examples/huggingface/object-detection.php @@ -12,15 +12,13 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; use Symfony\AI\Platform\Message\Content\Image; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('facebook/detr-resnet-50'); $image = Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'); -$result = $platform->invoke($model, $image, [ +$result = $platform->invoke('facebook/detr-resnet-50', $image, [ 'task' => Task::OBJECT_DETECTION, ]); diff --git a/examples/huggingface/question-answering.php b/examples/huggingface/question-answering.php index 15a92f34d..1beee702f 100644 --- a/examples/huggingface/question-answering.php +++ b/examples/huggingface/question-answering.php @@ -11,19 +11,17 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('deepset/roberta-base-squad2'); $input = [ 'question' => 'What is the capital of France?', 'context' => 'Paris is the capital and most populous city of France, with an estimated population of 
2,175,601 residents as of 2018, in an area of more than 105 square kilometres.', ]; -$result = $platform->invoke($model, $input, [ +$result = $platform->invoke('deepset/roberta-base-squad2', $input, [ 'task' => Task::QUESTION_ANSWERING, ]); diff --git a/examples/huggingface/sentence-similarity.php b/examples/huggingface/sentence-similarity.php index 6c5cb1fa7..8fc539b98 100644 --- a/examples/huggingface/sentence-similarity.php +++ b/examples/huggingface/sentence-similarity.php @@ -11,12 +11,10 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('sentence-transformers/all-MiniLM-L6-v2'); $input = [ 'source_sentence' => 'That is a happy dog', @@ -27,7 +25,7 @@ ], ]; -$result = $platform->invoke($model, $input, [ +$result = $platform->invoke('sentence-transformers/all-MiniLM-L6-v2', $input, [ 'task' => Task::SENTENCE_SIMILARITY, ]); diff --git a/examples/huggingface/summarization.php b/examples/huggingface/summarization.php index 5d6d6b0c1..23f00ac8e 100644 --- a/examples/huggingface/summarization.php +++ b/examples/huggingface/summarization.php @@ -11,12 +11,10 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('facebook/bart-large-cnn'); $longText = <<invoke($model, $longText, [ +$result = $platform->invoke('facebook/bart-large-cnn', $longText, [ 'task' => Task::SUMMARIZATION, ]); diff --git a/examples/huggingface/table-question-answering.php b/examples/huggingface/table-question-answering.php index 32dae0db1..bd9e1c9e8 100644 --- a/examples/huggingface/table-question-answering.php +++ b/examples/huggingface/table-question-answering.php @@ -11,12 +11,10 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('microsoft/tapex-base'); $input = [ 'query' => 'select year where city = beijing', @@ -26,7 +24,7 @@ ], ]; -$result = $platform->invoke($model, $input, [ +$result = $platform->invoke('microsoft/tapex-base', $input, [ 'task' => Task::TABLE_QUESTION_ANSWERING, ]); diff --git a/examples/huggingface/text-classification.php b/examples/huggingface/text-classification.php index eac749f1e..9dd84e1f5 100644 --- a/examples/huggingface/text-classification.php +++ b/examples/huggingface/text-classification.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('ProsusAI/finbert'); -$result = $platform->invoke($model, 'I like you. I love you.', [ +$result = $platform->invoke('ProsusAI/finbert', 'I like you. 
I love you.', [ 'task' => Task::TEXT_CLASSIFICATION, ]); diff --git a/examples/huggingface/text-generation.php b/examples/huggingface/text-generation.php index 2af062145..1eb8343dc 100644 --- a/examples/huggingface/text-generation.php +++ b/examples/huggingface/text-generation.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('gpt2'); -$result = $platform->invoke($model, 'The quick brown fox jumps over the lazy', [ +$result = $platform->invoke('gpt2', 'The quick brown fox jumps over the lazy', [ 'task' => Task::TEXT_GENERATION, ]); diff --git a/examples/huggingface/text-to-image.php b/examples/huggingface/text-to-image.php index 399f7511e..74f36bdc8 100644 --- a/examples/huggingface/text-to-image.php +++ b/examples/huggingface/text-to-image.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('black-forest-labs/FLUX.1-dev'); -$result = $platform->invoke($model, 'Astronaut riding a horse', [ +$result = $platform->invoke('black-forest-labs/FLUX.1-dev', 'Astronaut riding a horse', [ 'task' => Task::TEXT_TO_IMAGE, ]); diff --git a/examples/huggingface/token-classification.php b/examples/huggingface/token-classification.php index 19d93b803..b955c1ac5 100644 --- a/examples/huggingface/token-classification.php +++ b/examples/huggingface/token-classification.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('dbmdz/bert-large-cased-finetuned-conll03-english'); -$result = $platform->invoke($model, 'John Smith works at Microsoft in London.', [ +$result = $platform->invoke('dbmdz/bert-large-cased-finetuned-conll03-english', 'John Smith works at Microsoft in London.', [ 'task' => Task::TOKEN_CLASSIFICATION, ]); diff --git a/examples/huggingface/translation.php b/examples/huggingface/translation.php index d2bc2971d..e659e8a39 100644 --- a/examples/huggingface/translation.php +++ b/examples/huggingface/translation.php @@ -11,14 +11,12 @@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('facebook/mbart-large-50-many-to-many-mmt'); -$result = $platform->invoke($model, 'Меня зовут Вольфганг и я живу в Берлине', [ +$result = $platform->invoke('facebook/mbart-large-50-many-to-many-mmt', 'Меня зовут Вольфганг и я живу в Берлине', [ 'task' => Task::TRANSLATION, 'src_lang' => 'ru', 'tgt_lang' => 'en', diff --git a/examples/huggingface/zero-shot-classification.php b/examples/huggingface/zero-shot-classification.php index 69eb530e4..e1d57f9ee 100644 --- a/examples/huggingface/zero-shot-classification.php +++ b/examples/huggingface/zero-shot-classification.php @@ -11,15 +11,13 
@@ use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory; use Symfony\AI\Platform\Bridge\HuggingFace\Task; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('HUGGINGFACE_KEY'), httpClient: http_client()); -$model = new Model('facebook/bart-large-mnli'); $text = 'Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!'; -$result = $platform->invoke($model, $text, [ +$result = $platform->invoke('facebook/bart-large-mnli', $text, [ 'task' => Task::ZERO_SHOT_CLASSIFICATION, 'candidate_labels' => ['refund', 'legal', 'faq'], ]); diff --git a/examples/indexer/index-file-loader.php b/examples/indexer/index-file-loader.php index 8a8b22e71..37c4e1b34 100644 --- a/examples/indexer/index-file-loader.php +++ b/examples/indexer/index-file-loader.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Store\Bridge\Local\InMemoryStore; use Symfony\AI\Store\Document\Loader\TextFileLoader; @@ -22,7 +21,7 @@ $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); $store = new InMemoryStore(); -$vectorizer = new Vectorizer($platform, new Embeddings('text-embedding-3-small')); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $indexer = new Indexer( loader: new TextFileLoader(), vectorizer: $vectorizer, diff --git a/examples/indexer/index-inmemory-loader.php b/examples/indexer/index-inmemory-loader.php index 20356b7b1..083db0ba8 100644 --- a/examples/indexer/index-inmemory-loader.php +++ b/examples/indexer/index-inmemory-loader.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Store\Bridge\Local\InMemoryStore; use Symfony\AI\Store\Document\Loader\InMemoryLoader; @@ -24,7 +23,7 @@ $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); $store = new InMemoryStore(); -$vectorizer = new Vectorizer($platform, new Embeddings('text-embedding-3-small')); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $documents = [ new TextDocument( diff --git a/examples/indexer/index-rss-loader.php b/examples/indexer/index-rss-loader.php index e42b013c8..f9b0e48ec 100644 --- a/examples/indexer/index-rss-loader.php +++ b/examples/indexer/index-rss-loader.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Store\Bridge\Local\InMemoryStore; use Symfony\AI\Store\Document\Loader\RssFeedLoader; @@ -22,7 +21,7 @@ $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); $store = new InMemoryStore(); -$vectorizer = new Vectorizer($platform, new Embeddings('text-embedding-3-small')); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $indexer = new Indexer( loader: new RssFeedLoader(HttpClient::create()), vectorizer: $vectorizer, diff --git a/examples/indexer/index-with-filters.php b/examples/indexer/index-with-filters.php index d9c337718..34f76b076 100644 --- a/examples/indexer/index-with-filters.php +++ b/examples/indexer/index-with-filters.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Store\Bridge\Local\InMemoryStore; use Symfony\AI\Store\Document\Filter\TextContainsFilter; @@ -25,7 +24,7 @@ $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); $store = new InMemoryStore(); -$vectorizer = new Vectorizer($platform, new Embeddings('text-embedding-3-small')); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); // Sample documents with some unwanted content $documents = [ diff --git a/examples/lmstudio/chat.php b/examples/lmstudio/chat.php index c98697f32..4dd26f720 100644 --- a/examples/lmstudio/chat.php +++ b/examples/lmstudio/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\LmStudio\Completions; use Symfony\AI\Platform\Bridge\LmStudio\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,13 +16,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('LMSTUDIO_HOST_URL'), http_client()); -$model = new Completions('gemma-3-4b-it-qat'); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('gemma-3-4b-it-qat', $messages, [ 'max_tokens' => 500, // specific options just for this call ]); diff --git a/examples/lmstudio/image-input-binary.php b/examples/lmstudio/image-input-binary.php index efcc00d64..0d7ba8e52 100644 --- a/examples/lmstudio/image-input-binary.php +++ b/examples/lmstudio/image-input-binary.php @@ -9,9 +9,7 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\LmStudio\Completions; use Symfony\AI\Platform\Bridge\LmStudio\PlatformFactory; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Message\Content\Image; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -19,10 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('LMSTUDIO_HOST_URL'), http_client()); -$model = new Completions( - name: 'gemma-3-4b-it-qat', - capabilities: [...Completions::DEFAULT_CAPABILITIES, Capability::INPUT_IMAGE] -); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -31,6 +25,6 @@ Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemma-3-4b-it-qat', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/memory/mariadb.php b/examples/memory/mariadb.php index 91b88c63e..ebb2ea778 100644 --- a/examples/memory/mariadb.php +++ b/examples/memory/mariadb.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\Memory\EmbeddingProvider; use Symfony\AI\Agent\Memory\MemoryInputProcessor; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -58,15 +56,16 @@ // create embeddings for documents as preparation of the chain memory $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL)); +$vectorizer = new Vectorizer($platform, $embeddings = 'text-embedding-3-small'); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); // Execute a chat call that is utilizing the memory -$embeddingsMemory = new EmbeddingProvider($platform, $embeddings, $store); +$embeddingsModel = $platform->getModelCatalog()->getModel($embeddings); +$embeddingsMemory = new EmbeddingProvider($platform, $embeddingsModel, $store); $memoryProcessor = new MemoryInputProcessor($embeddingsMemory); -$agent = new Agent($platform, new Gpt(Gpt::GPT_4O_MINI), [$memoryProcessor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$memoryProcessor], logger: logger()); $messages = new MessageBag(Message::ofUser('Have we discussed about my friend John in the past? 
If yes, what did we talk about?')); $result = $agent->call($messages); diff --git a/examples/memory/static.php b/examples/memory/static.php index 8d717080d..db9f74341 100644 --- a/examples/memory/static.php +++ b/examples/memory/static.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\InputProcessor\SystemPromptInputProcessor; use Symfony\AI\Agent\Memory\MemoryInputProcessor; use Symfony\AI\Agent\Memory\StaticMemoryProvider; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,7 +20,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create($_ENV['OPENAI_API_KEY'], http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $systemPromptProcessor = new SystemPromptInputProcessor('You are a professional trainer with short, personalized advice and a motivating claim.'); @@ -32,8 +30,8 @@ ); $memoryProcessor = new MemoryInputProcessor($personalFacts); -$chain = new Agent($platform, $model, [$systemPromptProcessor, $memoryProcessor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$systemPromptProcessor, $memoryProcessor], logger: logger()); $messages = new MessageBag(Message::ofUser('What do we do today?')); -$result = $chain->call($messages); +$result = $agent->call($messages); echo $result->getContent().\PHP_EOL; diff --git a/examples/misc/chat-system-prompt.php b/examples/misc/chat-system-prompt.php index 832637ec4..b8f411bfa 100644 --- a/examples/misc/chat-system-prompt.php +++ b/examples/misc/chat-system-prompt.php @@ -11,7 +11,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\InputProcessor\SystemPromptInputProcessor; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -19,11 +18,10 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $processor = new SystemPromptInputProcessor('You are Yoda and write like he speaks. But short.'); -$agent = new Agent($platform, $model, [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What is the meaning of life?')); $result = $agent->call($messages); diff --git a/examples/misc/parallel-chat-gpt.php b/examples/misc/parallel-chat-gpt.php index 69bac2856..5357f82db 100644 --- a/examples/misc/parallel-chat-gpt.php +++ b/examples/misc/parallel-chat-gpt.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,7 +16,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $messages = new MessageBag( Message::forSystem('You will be given a letter and you answer with only the next letter of the alphabet.'), @@ -27,7 +25,7 @@ $results = []; foreach (range('A', 'D') as $letter) { echo ' - Request for the letter '.$letter.' 
initiated.'.\PHP_EOL; - $results[] = $platform->invoke($model, $messages->with(Message::ofUser($letter))); + $results[] = $platform->invoke('gpt-4o-mini', $messages->with(Message::ofUser($letter))); } echo 'Waiting for the responses ...'.\PHP_EOL; diff --git a/examples/misc/parallel-embeddings.php b/examples/misc/parallel-embeddings.php index a64beef0f..eccebc5cd 100644 --- a/examples/misc/parallel-embeddings.php +++ b/examples/misc/parallel-embeddings.php @@ -9,15 +9,14 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$ada = new Embeddings(Embeddings::TEXT_ADA_002); -$small = new Embeddings(Embeddings::TEXT_3_SMALL); -$large = new Embeddings(Embeddings::TEXT_3_LARGE); +$ada = 'text-embedding-ada-002'; +$small = 'text-embedding-3-small'; +$large = 'text-embedding-3-large'; echo 'Initiating parallel embeddings calls to platform ...'.\PHP_EOL; $results = []; diff --git a/examples/misc/persistent-chat.php b/examples/misc/persistent-chat.php index 8b9df26fa..1a3ecc712 100644 --- a/examples/misc/persistent-chat.php +++ b/examples/misc/persistent-chat.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\Chat; use Symfony\AI\Agent\Chat\MessageStore\InMemoryStore; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,9 +19,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$llm = new Gpt(Gpt::GPT_4O_MINI); -$agent = new Agent($platform, $llm, logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', logger: logger()); $chat = new Chat($agent, new InMemoryStore()); $messages = new MessageBag( diff --git a/examples/mistral/chat-multiple.php b/examples/mistral/chat-multiple.php index 8f361d458..ae731fc6e 100644 --- a/examples/mistral/chat-multiple.php +++ b/examples/mistral/chat-multiple.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -22,7 +21,7 @@ Message::forSystem('Just give short answers.'), Message::ofUser('What is your favorite color?'), ); -$result = $platform->invoke(new Mistral(Mistral::MISTRAL_LARGE), $messages, [ +$result = $platform->invoke('mistral-large-latest', $messages, [ 'temperature' => 1.5, 'n' => 10, ]); diff --git a/examples/mistral/chat.php b/examples/mistral/chat.php index d880efa35..c96b08932 100644 --- a/examples/mistral/chat.php +++ b/examples/mistral/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,10 +16,9 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Mistral(Mistral::MISTRAL_LARGE); $messages = new MessageBag(Message::ofUser('What is the best French cheese?')); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('mistral-large-latest', $messages, [ 'temperature' => 0.7, ]); diff --git a/examples/mistral/embeddings.php b/examples/mistral/embeddings.php index 54b061e9e..6a96b60cb 100644 --- a/examples/mistral/embeddings.php +++ b/examples/mistral/embeddings.php @@ -9,15 +9,13 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Mistral\Embeddings; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Embeddings(Embeddings::MISTRAL_EMBED); -$result = $platform->invoke($model, <<invoke('mistral-embed', <<invoke($model, $messages); +$result = $platform->invoke('mistral-small-latest', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/mistral/pdf-input-binary.php b/examples/mistral/pdf-input-binary.php index bca57e7af..81eabc1d2 100644 --- a/examples/mistral/pdf-input-binary.php +++ b/examples/mistral/pdf-input-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Content\Document; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), httpClient: http_client()); -$model = new Mistral(Mistral::MISTRAL_SMALL); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ 'What is this document about?', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('mistral-small-latest', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/mistral/pdf-input-url.php b/examples/mistral/pdf-input-url.php index 7638f8e94..37b6fe32a 100644 --- a/examples/mistral/pdf-input-url.php +++ b/examples/mistral/pdf-input-url.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Content\DocumentUrl; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), httpClient: http_client()); -$model = new Mistral(Mistral::MISTRAL_SMALL); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ 'What is this document about?', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('mistral-small-latest', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/mistral/stream.php b/examples/mistral/stream.php index a2c64ecee..4fa1650ad 100644 --- a/examples/mistral/stream.php +++ b/examples/mistral/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,10 +16,9 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Mistral(Mistral::MISTRAL_LARGE); $messages = new MessageBag(Message::ofUser('What is the eighth prime number?')); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('mistral-large-latest', $messages, [ 'stream' => true, ]); diff --git a/examples/mistral/structured-output-math.php b/examples/mistral/structured-output-math.php index 822e1e8d8..a49490e05 100644 --- a/examples/mistral/structured-output-math.php +++ b/examples/mistral/structured-output-math.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Agent\StructuredOutput\ResponseFormatFactory; use Symfony\AI\Fixtures\StructuredOutput\MathReasoning; -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -24,11 +23,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Mistral(Mistral::MISTRAL_SMALL); + $serializer = new Serializer([new ObjectNormalizer()], [new JsonEncoder()]); $processor = new AgentProcessor(new ResponseFormatFactory(), $serializer); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'mistral-small-latest', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a helpful math tutor. 
Guide the user through the solution step by step.'), Message::ofUser('how can I solve 8x + 7 = -23'), diff --git a/examples/mistral/token-metadata.php b/examples/mistral/token-metadata.php index 1ce8f5c34..46f992f5b 100644 --- a/examples/mistral/token-metadata.php +++ b/examples/mistral/token-metadata.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Bridge\Mistral\TokenOutputProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,9 +18,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Mistral(Mistral::MISTRAL_LARGE); -$agent = new Agent($platform, $model, outputProcessors: [new TokenOutputProcessor()], logger: logger()); +$agent = new Agent($platform, 'mistral-large-latest', outputProcessors: [new TokenOutputProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), diff --git a/examples/mistral/toolcall-stream.php b/examples/mistral/toolcall-stream.php index 5f7a9e50d..a3e271e6e 100644 --- a/examples/mistral/toolcall-stream.php +++ b/examples/mistral/toolcall-stream.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\YouTubeTranscriber; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,12 +20,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Mistral(Mistral::MISTRAL_LARGE); $transcriber = new YouTubeTranscriber(http_client()); $toolbox = new Toolbox([$transcriber], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'mistral-large-latest', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Please summarize this video for me: https://www.youtube.com/watch?v=6uXW-ulpj0s')); $result = $agent->call($messages, [ diff --git a/examples/mistral/toolcall.php b/examples/mistral/toolcall.php index b8a9710ba..d2009a386 100644 --- a/examples/mistral/toolcall.php +++ b/examples/mistral/toolcall.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Mistral\Mistral; use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,11 +20,10 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('MISTRAL_API_KEY'), http_client()); -$model = new Mistral(Mistral::MISTRAL_LARGE); $toolbox = new Toolbox([new Clock()], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'mistral-large-latest', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What time is it?')); $result = $agent->call($messages); diff --git a/examples/ollama/chat-llama.php b/examples/ollama/chat-llama.php index 5a96cb2d9..2f4f82316 100644 --- 
a/examples/ollama/chat-llama.php +++ b/examples/ollama/chat-llama.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Ollama\Ollama; use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,7 +16,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client()); -$model = new Ollama(env('OLLAMA_LLM')); $messages = new MessageBag( Message::forSystem('You are a helpful assistant.'), @@ -25,7 +23,7 @@ ); try { - $result = $platform->invoke($model, $messages); + $result = $platform->invoke(env('OLLAMA_LLM'), $messages); echo $result->getResult()->getContent().\PHP_EOL; } catch (InvalidArgumentException $e) { echo $e->getMessage()."\nMaybe use a different model?\n"; diff --git a/examples/ollama/embeddings.php b/examples/ollama/embeddings.php index ac8d4ca95..83d7c722f 100644 --- a/examples/ollama/embeddings.php +++ b/examples/ollama/embeddings.php @@ -9,14 +9,13 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Ollama\Ollama; use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client()); -$response = $platform->invoke(new Ollama(env('OLLAMA_EMBEDDINGS')), <<invoke(env('OLLAMA_EMBEDDINGS'), <<index($documents); -$model = new Ollama(env('OLLAMA_LLM')); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, env('OLLAMA_LLM'), [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/ollama/stream.php b/examples/ollama/stream.php index d12a67a0d..d2c58239d 100644 --- a/examples/ollama/stream.php +++ b/examples/ollama/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Ollama\Ollama; use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,13 +16,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client()); -$model = new Ollama(env('OLLAMA_LLM')); $messages = new MessageBag( Message::forSystem('You are a helpful assistant.'), Message::ofUser('Tina has one brother and one sister. 
How many sisters do Tina\'s siblings have?'), ); -$result = $platform->invoke($model, $messages, ['stream' => true]); +$result = $platform->invoke(env('OLLAMA_LLM'), $messages, ['stream' => true]); print_stream($result); diff --git a/examples/ollama/structured-output-math.php b/examples/ollama/structured-output-math.php index a3d4fdb8b..ff45ab574 100644 --- a/examples/ollama/structured-output-math.php +++ b/examples/ollama/structured-output-math.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\MathReasoning; -use Symfony\AI\Platform\Bridge\Ollama\Ollama; use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,10 +19,9 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client()); -$model = new Ollama(env('OLLAMA_LLM')); $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, env('OLLAMA_LLM'), [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a helpful math tutor. Guide the user through the solution step by step.'), Message::ofUser('how can I solve 8x + 7 = -23'), diff --git a/examples/ollama/toolcall.php b/examples/ollama/toolcall.php index 2a012cad2..c34037f12 100644 --- a/examples/ollama/toolcall.php +++ b/examples/ollama/toolcall.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\Ollama\Ollama; use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,11 +20,10 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client()); -$model = new Ollama(env('OLLAMA_LLM')); $toolbox = new Toolbox([new Clock()], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, env('OLLAMA_LLM'), [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What time is it?')); $result = $agent->call($messages); diff --git a/examples/openai/audio-input.php b/examples/openai/audio-input.php index 1aa2079a2..5440fd751 100644 --- a/examples/openai/audio-input.php +++ b/examples/openai/audio-input.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Content\Audio; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_AUDIO); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gpt-4o-audio-preview', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/openai/audio-transcript.php b/examples/openai/audio-transcript.php index d775d2096..65a5dbd3c 100644 --- a/examples/openai/audio-transcript.php +++ b/examples/openai/audio-transcript.php @@ -10,15 +10,13 @@ */ use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; -use Symfony\AI\Platform\Bridge\OpenAi\Whisper; use Symfony\AI\Platform\Message\Content\Audio; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Whisper(Whisper::WHISPER_1); $file = Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'); -$result = $platform->invoke($model, $file); +$result = $platform->invoke('whisper-1', $file); echo $result->asText().\PHP_EOL; diff --git a/examples/openai/chat-o1.php b/examples/openai/chat-o1.php index be420b402..e747580f9 100644 --- a/examples/openai/chat-o1.php +++ b/examples/openai/chat-o1.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,7 +22,6 @@ } $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::O1_PREVIEW); $prompt = <<call(new MessageBag(Message::ofUser($prompt))); echo $result->getContent().\PHP_EOL; diff --git a/examples/openai/chat.php b/examples/openai/chat.php index 39a9e4277..298d753d4 100644 --- a/examples/openai/chat.php +++ b/examples/openai/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,13 +16,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('gpt-4o-mini', $messages, [ 'max_tokens' => 500, // specific options just for this call ]); diff --git a/examples/openai/embeddings.php b/examples/openai/embeddings.php index 6180f3504..c12c3074a 100644 --- a/examples/openai/embeddings.php +++ b/examples/openai/embeddings.php @@ -9,15 +9,13 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$embeddings = new Embeddings(Embeddings::TEXT_3_SMALL); -$result = $platform->invoke($embeddings, <<invoke('text-embedding-3-small', <<invoke($model, $messages); +$result = $platform->invoke('gpt-4o-mini', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/openai/image-input-url.php b/examples/openai/image-input-url.php index 05a907e48..cabc35cc9 100644 --- a/examples/openai/image-input-url.php +++ b/examples/openai/image-input-url.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Content\ImageUrl; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), @@ -27,6 +25,6 @@ new ImageUrl('https://upload.wikimedia.org/wikipedia/commons/thumb/3/31/Webysther_20160423_-_Elephpant.svg/350px-Webysther_20160423_-_Elephpant.svg.png'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gpt-4o-mini', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/openai/image-output-dall-e-2.php b/examples/openai/image-output-dall-e-2.php index b28ff4774..0e0f5f7f9 100644 --- a/examples/openai/image-output-dall-e-2.php +++ b/examples/openai/image-output-dall-e-2.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\DallE; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; @@ -17,7 +16,7 @@ $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); $result = $platform->invoke( - model: new DallE(DallE::DALL_E_2), + model: 'dall-e-2', input: 'A cartoon-style elephant with a long trunk and large ears.', options: [ 'response_format' => 'url', // Generate response as URL diff --git a/examples/openai/image-output-dall-e-3.php b/examples/openai/image-output-dall-e-3.php index 0898dc83e..c63858d6e 100644 --- a/examples/openai/image-output-dall-e-3.php +++ b/examples/openai/image-output-dall-e-3.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\DallE; use Symfony\AI\Platform\Bridge\OpenAi\DallE\ImageResult; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; @@ -18,7 +17,7 @@ $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); $result = $platform->invoke( - model: new DallE(name: DallE::DALL_E_3), + model: 'dall-e-3', input: 'A cartoon-style elephant with a long trunk and large ears.', options: [ 'response_format' => 'url', // Generate response as URL diff --git a/examples/openai/pdf-input-binary.php b/examples/openai/pdf-input-binary.php index 1b3ee9ab2..305eb6cc7 100644 --- a/examples/openai/pdf-input-binary.php +++ b/examples/openai/pdf-input-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Content\Document; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ Document::fromFile(dirname(__DIR__, 2).'/fixtures/document.pdf'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gpt-4o-mini', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/openai/stream.php b/examples/openai/stream.php index bb36cb69d..6ddbc2d25 100644 --- a/examples/openai/stream.php +++ b/examples/openai/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,13 +16,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $messages = new MessageBag( Message::forSystem('You are a thoughtful philosopher.'), Message::ofUser('What is the purpose of an ant?'), ); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('gpt-4o-mini', $messages, [ 'stream' => true, // enable streaming of response text ]); diff --git a/examples/openai/structured-output-clock.php b/examples/openai/structured-output-clock.php index 7fe092deb..023f1c50b 100644 --- a/examples/openai/structured-output-clock.php +++ b/examples/openai/structured-output-clock.php @@ -14,7 +14,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor as ToolProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,13 +22,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $clock = new Clock(new SymfonyClock()); $toolbox = new Toolbox([$clock], logger: logger()); $toolProcessor = new ToolProcessor($toolbox); $structuredOutputProcessor = new StructuredOutputProcessor(); -$agent = new Agent($platform, $model, [$toolProcessor, $structuredOutputProcessor], [$toolProcessor, $structuredOutputProcessor]); +$agent = new Agent($platform, 'gpt-4o-mini', [$toolProcessor, $structuredOutputProcessor], [$toolProcessor, $structuredOutputProcessor]); $messages = new MessageBag(Message::ofUser('What date and time is it?')); $result = $agent->call($messages, ['response_format' => [ diff --git a/examples/openai/structured-output-list-of-polymorphic-items.php b/examples/openai/structured-output-list-of-polymorphic-items.php index eba131e5f..def7afdd8 100644 --- a/examples/openai/structured-output-list-of-polymorphic-items.php +++ b/examples/openai/structured-output-list-of-polymorphic-items.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\PolymorphicType\ListOfPolymorphicTypesDto; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use 
Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,10 +19,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); - $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a persona data collector! Return all the data you can gather from the user input.'), Message::ofUser('Hi! My name is John Doe, I am 30 years old and I live in Paris.'), diff --git a/examples/openai/structured-output-math.php b/examples/openai/structured-output-math.php index 9fa376611..7c0e0ad9d 100644 --- a/examples/openai/structured-output-math.php +++ b/examples/openai/structured-output-math.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\MathReasoning; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,10 +19,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); - $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a helpful math tutor. Guide the user through the solution step by step.'), Message::ofUser('how can I solve 8x + 7 = -23'), diff --git a/examples/openai/structured-output-union-types.php b/examples/openai/structured-output-union-types.php index 7cee79984..42c685925 100644 --- a/examples/openai/structured-output-union-types.php +++ b/examples/openai/structured-output-union-types.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\UnionType\UnionTypeDto; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,10 +19,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); - $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem(<<call($messages); diff --git a/examples/openrouter/chat-gemini.php b/examples/openrouter/chat-gemini.php index 72c0f48a4..0c79c0e01 100644 --- a/examples/openrouter/chat-gemini.php +++ b/examples/openrouter/chat-gemini.php @@ -18,8 +18,8 @@ $platform = PlatformFactory::create(env('OPENROUTER_KEY'), http_client()); // In case free is running into 429 rate limit errors, you can use the paid model: -// $model = new Model('google/gemini-2.0-flash-lite-001'); -$model = new Model('google/gemini-2.0-flash-exp:free'); +// $model = 'google/gemini-2.0-flash-lite-001'; +$model = 'google/gemini-2.0-flash-exp:free'; $messages = new MessageBag( 
Message::forSystem('You are a helpful assistant.'), diff --git a/examples/perplexity/academic-search.php b/examples/perplexity/academic-search.php index 033b53f66..e37d55325 100644 --- a/examples/perplexity/academic-search.php +++ b/examples/perplexity/academic-search.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Bridge\Perplexity\SearchResultProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,8 +18,7 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); -$agent = new Agent($platform, $model, outputProcessors: [new SearchResultProcessor()], logger: logger()); +$agent = new Agent($platform, 'sonar', outputProcessors: [new SearchResultProcessor()], logger: logger()); $messages = new MessageBag(Message::ofUser('What is the best French cheese of the first quarter-century of 21st century?')); $response = $agent->call($messages, [ diff --git a/examples/perplexity/chat.php b/examples/perplexity/chat.php index ce906e4dd..0b62e607a 100644 --- a/examples/perplexity/chat.php +++ b/examples/perplexity/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,9 +16,8 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); $messages = new MessageBag(Message::ofUser('What is the best French cheese?')); -$response = $platform->invoke($model, $messages); +$response = $platform->invoke('sonar', $messages); echo $response->getResult()->getContent().\PHP_EOL; diff --git a/examples/perplexity/disable-search.php b/examples/perplexity/disable-search.php index fb003d1a1..5cf4d7bb9 100644 --- a/examples/perplexity/disable-search.php +++ b/examples/perplexity/disable-search.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,10 +16,9 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); $messages = new MessageBag(Message::ofUser('What is 2 + 2?')); -$response = $platform->invoke($model, $messages, [ +$response = $platform->invoke('sonar', $messages, [ 'disable_search' => true, ]); diff --git a/examples/perplexity/image-input-url.php b/examples/perplexity/image-input-url.php index 0bb832242..11fdeb03f 100644 --- a/examples/perplexity/image-input-url.php +++ b/examples/perplexity/image-input-url.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Bridge\Perplexity\SearchResultProcessor; use Symfony\AI\Platform\Message\Content\ImageUrl; @@ -20,8 +19,7 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); -$agent = new Agent($platform, $model, outputProcessors: [new SearchResultProcessor()], logger: logger()); +$agent = new Agent($platform, 'sonar', outputProcessors: [new SearchResultProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), diff --git a/examples/perplexity/pdf-input-url.php b/examples/perplexity/pdf-input-url.php index c3f7434b0..c6431e548 100644 --- a/examples/perplexity/pdf-input-url.php +++ b/examples/perplexity/pdf-input-url.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Bridge\Perplexity\SearchResultProcessor; use Symfony\AI\Platform\Message\Content\DocumentUrl; @@ -20,8 +19,7 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); -$agent = new Agent($platform, $model, outputProcessors: [new SearchResultProcessor()], logger: logger()); +$agent = new Agent($platform, 'sonar', outputProcessors: [new SearchResultProcessor()], logger: logger()); $messages = new MessageBag( Message::ofUser( diff --git a/examples/perplexity/stream.php b/examples/perplexity/stream.php index 8311ae583..762c7f051 100644 --- a/examples/perplexity/stream.php +++ b/examples/perplexity/stream.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Bridge\Perplexity\SearchResultProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,8 +18,7 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); -$agent = new Agent($platform, $model, outputProcessors: [new SearchResultProcessor()], logger: logger()); +$agent = new Agent($platform, 'sonar', outputProcessors: [new SearchResultProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a thoughtful philosopher.'), diff --git 
a/examples/perplexity/token-metadata.php b/examples/perplexity/token-metadata.php index 544a62268..f92db5a5a 100644 --- a/examples/perplexity/token-metadata.php +++ b/examples/perplexity/token-metadata.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Bridge\Perplexity\TokenOutputProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,8 +18,7 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR_DEEP_RESEARCH); -$agent = new Agent($platform, $model, outputProcessors: [new TokenOutputProcessor()], logger: logger()); +$agent = new Agent($platform, 'sonar', outputProcessors: [new TokenOutputProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), diff --git a/examples/perplexity/web-search.php b/examples/perplexity/web-search.php index 92ca9a2f8..d4d0fcfab 100644 --- a/examples/perplexity/web-search.php +++ b/examples/perplexity/web-search.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; use Symfony\AI\Platform\Bridge\Perplexity\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,10 +16,9 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('PERPLEXITY_API_KEY'), http_client()); -$model = new Perplexity(Perplexity::SONAR); $messages = new MessageBag(Message::ofUser('What is the best French cheese?')); -$response = $platform->invoke($model, $messages, [ +$response = $platform->invoke('sonar', $messages, [ 'search_domain_filter' => [ 'https://en.wikipedia.org/wiki/Cheese', ], diff --git a/examples/rag/cache.php b/examples/rag/cache.php index 7dff96b4f..432ba99b2 100644 --- a/examples/rag/cache.php +++ b/examples/rag/cache.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -45,16 +43,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/chromadb.php b/examples/rag/chromadb.php index 9f767abd5..224851786 100644 --- a/examples/rag/chromadb.php +++ b/examples/rag/chromadb.php @@ -15,8 +15,6 
@@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/clickhouse.php b/examples/rag/clickhouse.php index 490228824..388e2f9dd 100644 --- a/examples/rag/clickhouse.php +++ b/examples/rag/clickhouse.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/cloudflare.php b/examples/rag/cloudflare.php index 4c8730870..a8a067487 100644 --- a/examples/rag/cloudflare.php +++ b/examples/rag/cloudflare.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents (keep in mind that upserting vectors is asynchronous) $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = 
new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/in-memory.php b/examples/rag/in-memory.php index 0f6dd14ca..7a7ee4dd7 100644 --- a/examples/rag/in-memory.php +++ b/examples/rag/in-memory.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -44,16 +42,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/mariadb-gemini.php b/examples/rag/mariadb-gemini.php index 0ae739faa..d17f731de 100644 --- a/examples/rag/mariadb-gemini.php +++ b/examples/rag/mariadb-gemini.php @@ -16,9 +16,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\Gemini\Embeddings; -use Symfony\AI\Platform\Bridge\Gemini\Embeddings\TaskType; -use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\Gemini\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -54,17 +51,15 @@ // create embeddings for documents $platform = PlatformFactory::create(env('GEMINI_API_KEY'), http_client()); -$embeddings = new Embeddings(Embeddings::GEMINI_EMBEDDING_EXP_03_07, ['dimensions' => 768, 'task_type' => TaskType::SemanticSimilarity]); -$vectorizer = new Vectorizer($platform, $embeddings, logger()); +$model = 'gemini-embedding-exp-03-07?dimensions=768&task_type=SEMANTIC_SIMILARITY'; +$vectorizer = new Vectorizer($platform, $model, logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new 
Gemini(Gemini::GEMINI_2_FLASH_LITE); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gemini-2.0-flash-lite-preview-02-05', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/mariadb-openai.php b/examples/rag/mariadb-openai.php index a582ae213..6a189be63 100644 --- a/examples/rag/mariadb-openai.php +++ b/examples/rag/mariadb-openai.php @@ -16,8 +16,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -53,16 +51,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/meilisearch.php b/examples/rag/meilisearch.php index 534278c2c..844b7a732 100644 --- a/examples/rag/meilisearch.php +++ b/examples/rag/meilisearch.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff 
--git a/examples/rag/milvus.php b/examples/rag/milvus.php index d669caa69..8051424d0 100644 --- a/examples/rag/milvus.php +++ b/examples/rag/milvus.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -53,16 +51,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/mongodb.php b/examples/rag/mongodb.php index 050d5a2e1..68be627b0 100644 --- a/examples/rag/mongodb.php +++ b/examples/rag/mongodb.php @@ -15,8 +15,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -51,19 +49,17 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY')); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); // initialize the index $store->setup(); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/neo4j.php b/examples/rag/neo4j.php index f4e1a38a8..62b3d692b 100644 --- a/examples/rag/neo4j.php +++ b/examples/rag/neo4j.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -55,16 +53,14 @@ // create 
embeddings for documents $platform = PlatformFactory::create($_SERVER['OPENAI_API_KEY']); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor]); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor]); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/pinecone.php b/examples/rag/pinecone.php index ee34c05e4..b2063de06 100644 --- a/examples/rag/pinecone.php +++ b/examples/rag/pinecone.php @@ -15,8 +15,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -45,16 +43,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/postgres.php b/examples/rag/postgres.php index 8fb3ed433..1e59b6b29 100644 --- a/examples/rag/postgres.php +++ b/examples/rag/postgres.php @@ -16,8 +16,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); 
-$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/qdrant.php b/examples/rag/qdrant.php index 079caf48b..2e9a60e76 100644 --- a/examples/rag/qdrant.php +++ b/examples/rag/qdrant.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/surrealdb.php b/examples/rag/surrealdb.php index ae858ca46..bd52c95e5 100644 --- a/examples/rag/surrealdb.php +++ b/examples/rag/surrealdb.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -55,16 +53,14 @@ // create embeddings for documents $platform = PlatformFactory::create($_SERVER['OPENAI_API_KEY']); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor]); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor]); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/typesense.php b/examples/rag/typesense.php index 232949652..3778bcdc3 100644 --- a/examples/rag/typesense.php +++ b/examples/rag/typesense.php @@ -14,8 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\SimilaritySearch; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; -use 
Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +50,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/rag/weaviate.php b/examples/rag/weaviate.php index 531871fd2..8b97bb16c 100644 --- a/examples/rag/weaviate.php +++ b/examples/rag/weaviate.php @@ -15,7 +15,6 @@ use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Fixtures\Movies; use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -52,16 +51,14 @@ // create embeddings for documents $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$vectorizer = new Vectorizer($platform, $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL), logger()); +$vectorizer = new Vectorizer($platform, 'text-embedding-3-small', logger()); $indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger()); $indexer->index($documents); -$model = new Gpt(Gpt::GPT_4O_MINI); - $similaritySearch = new SimilaritySearch($vectorizer, $store); $toolbox = new Toolbox([$similaritySearch], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('Please answer all user questions only using SimilaritySearch function.'), diff --git a/examples/replicate/chat-llama.php b/examples/replicate/chat-llama.php index c9d6adfa7..4dd371472 100644 --- a/examples/replicate/chat-llama.php +++ b/examples/replicate/chat-llama.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Meta\Llama; use Symfony\AI\Platform\Bridge\Replicate\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,12 +16,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('REPLICATE_API_KEY'), http_client()); -$model = new Llama(Llama::V3_1_405B_INSTRUCT); $messages = new MessageBag( Message::forSystem('You are a helpful assistant.'), Message::ofUser('Tina has one brother and one sister. 
How many sisters do Tina\'s siblings have?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('llama-3.1-405b-instruct', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/scaleway/chat.php b/examples/scaleway/chat.php index fdf978251..7fc559d59 100644 --- a/examples/scaleway/chat.php +++ b/examples/scaleway/chat.php @@ -10,19 +10,17 @@ */ use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory; -use Symfony\AI\Platform\Bridge\Scaleway\Scaleway; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('SCALEWAY_SECRET_KEY'), http_client()); -$model = new Scaleway(Scaleway::OPENAI_OSS); $messages = new MessageBag( Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gpt-oss-120b', $messages); echo $result->asText().\PHP_EOL; diff --git a/examples/scaleway/embeddings.php b/examples/scaleway/embeddings.php index 6f474a51f..01ed69121 100644 --- a/examples/scaleway/embeddings.php +++ b/examples/scaleway/embeddings.php @@ -9,14 +9,13 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\Scaleway\Embeddings; use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('SCALEWAY_SECRET_KEY'), http_client()); -$result = $platform->invoke(new Embeddings(), <<invoke('bge-multilingual-gemma2', <<invoke($model, $messages, ['stream' => true]); +$result = $platform->invoke('gpt-oss-120b', $messages, ['stream' => true]); foreach ($result->getResult()->getContent() as $word) { echo $word; diff --git a/examples/scaleway/structured-output-math.php b/examples/scaleway/structured-output-math.php index 34ec09d4b..9bd65b0d7 100644 --- a/examples/scaleway/structured-output-math.php +++ b/examples/scaleway/structured-output-math.php @@ -13,17 +13,15 @@ use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\MathReasoning; use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory; -use Symfony\AI\Platform\Bridge\Scaleway\Scaleway; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('SCALEWAY_SECRET_KEY'), http_client()); -$model = new Scaleway(Scaleway::OPENAI_OSS); $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-oss-120b', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a helpful math tutor. 
Guide the user through the solution step by step.'), Message::ofUser('how can I solve 8x + 7 = -23'), diff --git a/examples/scaleway/toolcall-stream.php b/examples/scaleway/toolcall-stream.php index c3a1e3047..a94eab220 100644 --- a/examples/scaleway/toolcall-stream.php +++ b/examples/scaleway/toolcall-stream.php @@ -14,19 +14,17 @@ use Symfony\AI\Agent\Toolbox\Tool\YouTubeTranscriber; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory; -use Symfony\AI\Platform\Bridge\Scaleway\Scaleway; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('SCALEWAY_SECRET_KEY'), http_client()); -$model = new Scaleway(Scaleway::OPENAI_OSS); $transcriber = new YouTubeTranscriber(http_client()); $toolbox = new Toolbox([$transcriber], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-oss-120b', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Please summarize this video for me: https://www.youtube.com/watch?v=6uXW-ulpj0s')); $result = $agent->call($messages, ['stream' => true]); diff --git a/examples/scaleway/toolcall.php b/examples/scaleway/toolcall.php index 9943198ad..0e1782274 100644 --- a/examples/scaleway/toolcall.php +++ b/examples/scaleway/toolcall.php @@ -14,19 +14,17 @@ use Symfony\AI\Agent\Toolbox\Tool\YouTubeTranscriber; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory; -use Symfony\AI\Platform\Bridge\Scaleway\Scaleway; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('SCALEWAY_SECRET_KEY'), http_client()); -$model = new Scaleway(Scaleway::OPENAI_OSS); $transcriber = new YouTubeTranscriber(http_client()); $toolbox = new Toolbox([$transcriber], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-oss-120b', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Please summarize this video for me: https://www.youtube.com/watch?v=6uXW-ulpj0s')); $result = $agent->call($messages); diff --git a/examples/scaleway/vision.php b/examples/scaleway/vision.php index 67e9188a0..378165ac6 100644 --- a/examples/scaleway/vision.php +++ b/examples/scaleway/vision.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory; -use Symfony\AI\Platform\Bridge\Scaleway\Scaleway; use Symfony\AI\Platform\Message\Content\Image; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -18,7 +17,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('SCALEWAY_SECRET_KEY'), http_client()); -$model = new Scaleway(Scaleway::MISTRAL_PIXTRAL); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ Image::fromFile(dirname(__DIR__, 2).'/fixtures/image.jpg'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('pixtral-12b-2409', $messages); -echo $result->asText().\PHP_EOL; +echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/toolbox/brave.php b/examples/toolbox/brave.php index f0729733e..5ebc6101f 100644 --- 
a/examples/toolbox/brave.php +++ b/examples/toolbox/brave.php @@ -14,7 +14,6 @@ use Symfony\AI\Agent\Toolbox\Tool\Brave; use Symfony\AI\Agent\Toolbox\Tool\Crawler; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -22,13 +21,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $brave = new Brave(http_client(), env('BRAVE_API_KEY')); $crawler = new Crawler(http_client()); $toolbox = new Toolbox([$brave, $crawler], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What was the latest game result of Dallas Cowboys?')); $result = $agent->call($messages); diff --git a/examples/toolbox/clock.php b/examples/toolbox/clock.php index 12b886c27..a2a688026 100644 --- a/examples/toolbox/clock.php +++ b/examples/toolbox/clock.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Toolbox; use Symfony\AI\Agent\Toolbox\ToolFactory\MemoryToolFactory; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -22,13 +21,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $metadataFactory = (new MemoryToolFactory()) ->addTool(Clock::class, 'clock', 'Get the current date and time', 'now'); $toolbox = new Toolbox([new Clock()], $metadataFactory, logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What date and time is it?')); $result = $agent->call($messages); diff --git a/examples/toolbox/firecrawl-crawl.php b/examples/toolbox/firecrawl-crawl.php index 041a1c224..114c781e6 100644 --- a/examples/toolbox/firecrawl-crawl.php +++ b/examples/toolbox/firecrawl-crawl.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Firecrawl; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,7 +20,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $firecrawl = new Firecrawl( http_client(), @@ -32,7 +30,7 @@ $toolbox = new Toolbox([$firecrawl], logger: logger()); $toolProcessor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, inputProcessors: [$toolProcessor], outputProcessors: [$toolProcessor]); +$agent = new Agent($platform, 'gpt-4o-mini', inputProcessors: [$toolProcessor], outputProcessors: [$toolProcessor]); $messages = new MessageBag(Message::ofUser('Crawl the following URL: https://symfony.com/doc/current/setup.html then summarize it in less than 200 
words.')); $result = $agent->call($messages); diff --git a/examples/toolbox/firecrawl-map.php b/examples/toolbox/firecrawl-map.php index 7893e97c7..21084c8b7 100644 --- a/examples/toolbox/firecrawl-map.php +++ b/examples/toolbox/firecrawl-map.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Firecrawl; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,7 +20,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $firecrawl = new Firecrawl( http_client(), @@ -32,7 +30,7 @@ $toolbox = new Toolbox([$firecrawl], logger: logger()); $toolProcessor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, inputProcessors: [$toolProcessor], outputProcessors: [$toolProcessor]); +$agent = new Agent($platform, 'gpt-4o-mini', inputProcessors: [$toolProcessor], outputProcessors: [$toolProcessor]); $messages = new MessageBag(Message::ofUser('Retrieve all the links from https://symfony.com then list only the ones related to the Messenger component.')); $result = $agent->call($messages); diff --git a/examples/toolbox/firecrawl-scrape.php b/examples/toolbox/firecrawl-scrape.php index 81da48f59..be9565fef 100644 --- a/examples/toolbox/firecrawl-scrape.php +++ b/examples/toolbox/firecrawl-scrape.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Firecrawl; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,7 +20,6 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $firecrawl = new Firecrawl( http_client(), @@ -32,7 +30,7 @@ $toolbox = new Toolbox([$firecrawl], logger: logger()); $toolProcessor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, inputProcessors: [$toolProcessor], outputProcessors: [$toolProcessor]); +$agent = new Agent($platform, 'gpt-4o-mini', inputProcessors: [$toolProcessor], outputProcessors: [$toolProcessor]); $messages = new MessageBag(Message::ofUser('Scrape the following URL: https://symfony.com/doc/current/setup.html then summarize it in less than 200 words.')); $result = $agent->call($messages); diff --git a/examples/toolbox/mapbox-geocode.php b/examples/toolbox/mapbox-geocode.php index 5ed906e3f..69f474aa2 100644 --- a/examples/toolbox/mapbox-geocode.php +++ b/examples/toolbox/mapbox-geocode.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Mapbox; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,12 +20,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $mapbox = new Mapbox(http_client(), env('MAPBOX_ACCESS_TOKEN')); $toolbox = new Toolbox([$mapbox], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, 
$model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What are the coordinates of Brandenburg Gate in Berlin?')); $result = $agent->call($messages); diff --git a/examples/toolbox/mapbox-reverse-geocode.php b/examples/toolbox/mapbox-reverse-geocode.php index d9f14403f..254d910e2 100644 --- a/examples/toolbox/mapbox-reverse-geocode.php +++ b/examples/toolbox/mapbox-reverse-geocode.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Mapbox; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,12 +20,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $mapbox = new Mapbox(http_client(), env('MAPBOX_ACCESS_TOKEN')); $toolbox = new Toolbox([$mapbox], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What address is at coordinates longitude -73.985131, latitude 40.758895?')); $result = $agent->call($messages); diff --git a/examples/toolbox/serpapi.php b/examples/toolbox/serpapi.php index 393c3b15e..a4bff0a5b 100644 --- a/examples/toolbox/serpapi.php +++ b/examples/toolbox/serpapi.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\SerpApi; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,12 +20,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $serpApi = new SerpApi(http_client(), env('SERP_API_KEY')); $toolbox = new Toolbox([$serpApi], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('Who is the current chancellor of Germany?')); $result = $agent->call($messages); diff --git a/examples/toolbox/tavily.php b/examples/toolbox/tavily.php index 6e5ec1682..0f5fb539f 100644 --- a/examples/toolbox/tavily.php +++ b/examples/toolbox/tavily.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Tavily; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,12 +20,11 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $tavily = new Tavily(http_client(), env('TAVILY_API_KEY')); $toolbox = new Toolbox([$tavily], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, 
[$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What was the latest game result of Dallas Cowboys?')); $result = $agent->call($messages); diff --git a/examples/toolbox/weather-event.php b/examples/toolbox/weather-event.php index fbd136f07..ecb5d5a2c 100644 --- a/examples/toolbox/weather-event.php +++ b/examples/toolbox/weather-event.php @@ -14,7 +14,6 @@ use Symfony\AI\Agent\Toolbox\Event\ToolCallsExecuted; use Symfony\AI\Agent\Toolbox\Tool\OpenMeteo; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -24,13 +23,12 @@ require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); -$model = new Gpt(Gpt::GPT_4O_MINI); $openMeteo = new OpenMeteo(http_client()); $toolbox = new Toolbox([$openMeteo], logger: logger()); $eventDispatcher = new EventDispatcher(); $processor = new AgentProcessor($toolbox, eventDispatcher: $eventDispatcher); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gpt-4o-mini', [$processor], [$processor], logger: logger()); // Add tool call result listener to enforce chain exits direct with structured response for weather tools $eventDispatcher->addListener(ToolCallsExecuted::class, function (ToolCallsExecuted $event): void { diff --git a/examples/transformers/text-generation.php b/examples/transformers/text-generation.php index 4c88ace7e..22a3f43dc 100644 --- a/examples/transformers/text-generation.php +++ b/examples/transformers/text-generation.php @@ -11,7 +11,6 @@ use Codewithkyrian\Transformers\Pipelines\Task; use Symfony\AI\Platform\Bridge\TransformersPhp\PlatformFactory; -use Symfony\AI\Platform\Model; require_once dirname(__DIR__).'/bootstrap.php'; @@ -26,9 +25,8 @@ } $platform = PlatformFactory::create(); -$model = new Model('Xenova/LaMini-Flan-T5-783M'); -$result = $platform->invoke($model, 'How many continents are there in the world?', [ +$result = $platform->invoke('Xenova/LaMini-Flan-T5-783M', 'How many continents are there in the world?', [ 'task' => Task::Text2TextGeneration, ]); diff --git a/examples/vertexai/audio-input.php b/examples/vertexai/audio-input.php index 8f6c8be2c..b2c26ea16 100644 --- a/examples/vertexai/audio-input.php +++ b/examples/vertexai/audio-input.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Content\Audio; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_FLASH); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'), ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-2.5-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/vertexai/chat.php b/examples/vertexai/chat.php index d2a21c5e6..eace1c4ad 100644 --- a/examples/vertexai/chat.php +++ b/examples/vertexai/chat.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,12 +16,11 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_FLASH); $messages = new MessageBag( Message::forSystem('You are an expert assistant in geography.'), Message::ofUser('Where is Mount Fuji?'), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-2.5-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/vertexai/embeddings.php b/examples/vertexai/embeddings.php index 53ffc73eb..71187a29a 100644 --- a/examples/vertexai/embeddings.php +++ b/examples/vertexai/embeddings.php @@ -9,15 +9,13 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\VertexAi\Embeddings\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$embeddings = new Model(Model::GEMINI_EMBEDDING_001); -$result = $platform->invoke($embeddings, <<invoke('gemini-embedding-001', <<invoke($model, $messages); +$result = $platform->invoke('gemini-2.5-pro', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/vertexai/pdf-input-binary.php b/examples/vertexai/pdf-input-binary.php index bd0fc47ad..25ebe67ad 100644 --- a/examples/vertexai/pdf-input-binary.php +++ b/examples/vertexai/pdf-input-binary.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Content\Document; use Symfony\AI\Platform\Message\Message; @@ -18,7 +17,6 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_FLASH); $messages = new MessageBag( Message::ofUser( @@ -26,6 +24,6 @@ 'What is this document about?', ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-2.5-flash', $messages); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/vertexai/server-tools.php b/examples/vertexai/server-tools.php index 782b58f58..10a2e1734 100644 --- a/examples/vertexai/server-tools.php +++ b/examples/vertexai/server-tools.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. */ -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -18,8 +17,6 @@ $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_PRO, ['server_tools' => ['url_context' => true]]); - $messages = new MessageBag( Message::ofUser( <<<'PROMPT' @@ -28,6 +25,6 @@ ), ); -$result = $platform->invoke($model, $messages); +$result = $platform->invoke('gemini-2.5-pro', $messages, ['server_tools' => ['url_context' => true]]); echo $result->getResult()->getContent().\PHP_EOL; diff --git a/examples/vertexai/stream.php b/examples/vertexai/stream.php index 16bda55d3..a292869cc 100644 --- a/examples/vertexai/stream.php +++ b/examples/vertexai/stream.php @@ -9,7 +9,6 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -17,14 +16,13 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_FLASH); $messages = new MessageBag( Message::forSystem('You are an expert assistant in geography.'), Message::ofUser('Where is Mount Fuji?'), ); -$result = $platform->invoke($model, $messages, [ +$result = $platform->invoke('gemini-2.5-flash', $messages, [ 'stream' => true, ]); diff --git a/examples/vertexai/structured-output-clock.php b/examples/vertexai/structured-output-clock.php index 6bee34c8b..840a21596 100644 --- a/examples/vertexai/structured-output-clock.php +++ b/examples/vertexai/structured-output-clock.php @@ -14,7 +14,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor as ToolProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -23,13 +22,12 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_PRO); $clock = new Clock(new SymfonyClock()); $toolbox = new Toolbox([$clock]); $toolProcessor = new ToolProcessor($toolbox); $structuredOutputProcessor = new StructuredOutputProcessor(); -$agent = new Agent($platform, $model, [$toolProcessor, $structuredOutputProcessor], [$toolProcessor, $structuredOutputProcessor], logger: logger()); +$agent = new Agent($platform, 'gemini-2.5-pro', [$toolProcessor, $structuredOutputProcessor], [$toolProcessor, $structuredOutputProcessor], logger: logger()); $messages = new MessageBag(Message::ofUser('What date and time is it?')); $result = $agent->call($messages, ['response_format' => [ diff --git a/examples/vertexai/structured-output-math.php b/examples/vertexai/structured-output-math.php index 2cb3affca..8ca97c801 100644 --- a/examples/vertexai/structured-output-math.php +++ b/examples/vertexai/structured-output-math.php @@ -12,7 +12,6 @@ use Symfony\AI\Agent\Agent; use Symfony\AI\Agent\StructuredOutput\AgentProcessor; use Symfony\AI\Fixtures\StructuredOutput\MathReasoning; -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -20,10 +19,9 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_5_FLASH_LITE); $processor = new AgentProcessor(); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gemini-2.5-flash-lite', [$processor], [$processor], logger: logger()); $messages = new MessageBag( Message::forSystem('You are a helpful math tutor. 
Guide the user through the solution step by step.'), Message::ofUser('how can I solve 8x + 7 = -23'), diff --git a/examples/vertexai/token-metadata.php b/examples/vertexai/token-metadata.php index 2f137b8e0..8ab02c10e 100644 --- a/examples/vertexai/token-metadata.php +++ b/examples/vertexai/token-metadata.php @@ -10,7 +10,6 @@ */ use Symfony\AI\Agent\Agent; -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Bridge\VertexAi\TokenOutputProcessor; use Symfony\AI\Platform\Message\Message; @@ -19,9 +18,8 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_0_FLASH_LITE); -$agent = new Agent($platform, $model, outputProcessors: [new TokenOutputProcessor()], logger: logger()); +$agent = new Agent($platform, 'gemini-2.0-flash-lite', outputProcessors: [new TokenOutputProcessor()], logger: logger()); $messages = new MessageBag( Message::forSystem('You are an expert assistant in animal study.'), Message::ofUser('What does a cat usually eat?'), diff --git a/examples/vertexai/toolcall.php b/examples/vertexai/toolcall.php index 8eb98f9a2..1473bf6af 100644 --- a/examples/vertexai/toolcall.php +++ b/examples/vertexai/toolcall.php @@ -13,7 +13,6 @@ use Symfony\AI\Agent\Toolbox\AgentProcessor; use Symfony\AI\Agent\Toolbox\Tool\Clock; use Symfony\AI\Agent\Toolbox\Toolbox; -use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model; use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; @@ -21,11 +20,10 @@ require_once __DIR__.'/bootstrap.php'; $platform = PlatformFactory::create(env('GOOGLE_CLOUD_LOCATION'), env('GOOGLE_CLOUD_PROJECT'), adc_aware_http_client()); -$model = new Model(Model::GEMINI_2_0_FLASH_LITE); $toolbox = new Toolbox([new Clock()], logger: logger()); $processor = new AgentProcessor($toolbox); -$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger()); +$agent = new Agent($platform, 'gemini-2.0-flash-lite', [$processor], [$processor], logger: logger()); $messages = new MessageBag(Message::ofUser('What time is it?')); $result = $agent->call($messages); diff --git a/examples/voyage/embeddings.php b/examples/voyage/embeddings.php index 289a1a100..3ea230031 100644 --- a/examples/voyage/embeddings.php +++ b/examples/voyage/embeddings.php @@ -10,14 +10,12 @@ */ use Symfony\AI\Platform\Bridge\Voyage\PlatformFactory; -use Symfony\AI\Platform\Bridge\Voyage\Voyage; require_once dirname(__DIR__).'/bootstrap.php'; $platform = PlatformFactory::create(env('VOYAGE_API_KEY'), http_client()); -$embeddings = new Voyage(Voyage::V3); -$result = $platform->invoke($embeddings, <<invoke('voyage-3', <<invoke($embeddings, [$text1, $text2, $text3]); +$result = $platform->invoke('voyage-3', [$text1, $text2, $text3]); echo 'Dimensions Text 1: '.$result->asVectors()[0]->getDimensions().\PHP_EOL; echo 'Dimensions Text 2: '.$result->asVectors()[1]->getDimensions().\PHP_EOL; diff --git a/src/agent/src/Agent.php b/src/agent/src/Agent.php index 15722c7d9..d02d17ea6 100644 --- a/src/agent/src/Agent.php +++ b/src/agent/src/Agent.php @@ -42,10 +42,11 @@ /** * @param InputProcessorInterface[] $inputProcessors * @param OutputProcessorInterface[] $outputProcessors + * @param non-empty-string $model */ public function __construct( private PlatformInterface $platform, - private Model $model, + private 
string $model, iterable $inputProcessors = [], iterable $outputProcessors = [], private string $name = 'agent', @@ -57,7 +58,7 @@ public function __construct( public function getModel(): Model { - return $this->model; + return $this->platform->getModelCatalog()->getModel($this->model); } public function getName(): string @@ -74,7 +75,7 @@ public function getName(): string */ public function call(MessageBag $messages, array $options = []): ResultInterface { - $input = new Input($this->model, $messages, $options); + $input = new Input($this->getModel(), $messages, $options); array_map(fn (InputProcessorInterface $processor) => $processor->processInput($input), $this->inputProcessors); $model = $input->model; @@ -90,7 +91,7 @@ public function call(MessageBag $messages, array $options = []): ResultInterface } try { - $result = $this->platform->invoke($model, $messages, $options)->getResult(); + $result = $this->platform->invoke($this->model, $messages, $options)->getResult(); } catch (ClientExceptionInterface $e) { $message = $e->getMessage(); $content = $e->getResponse()->toArray(false); diff --git a/src/agent/src/Memory/EmbeddingProvider.php b/src/agent/src/Memory/EmbeddingProvider.php index e68a1b691..4a54381c4 100644 --- a/src/agent/src/Memory/EmbeddingProvider.php +++ b/src/agent/src/Memory/EmbeddingProvider.php @@ -53,7 +53,7 @@ public function load(Input $input): array $userMessageTextContent = array_shift($userMessageTextContent); - $vectors = $this->platform->invoke($this->model, $userMessageTextContent->text)->asVectors(); + $vectors = $this->platform->invoke($this->model->getName(), $userMessageTextContent->text)->asVectors(); $foundEmbeddingContent = $this->vectorStore->query($vectors[0]); if (0 === \count($foundEmbeddingContent)) { return []; diff --git a/src/agent/tests/AgentTest.php b/src/agent/tests/AgentTest.php index 052599f7d..b0da87d27 100644 --- a/src/agent/tests/AgentTest.php +++ b/src/agent/tests/AgentTest.php @@ -30,6 +30,7 @@ use Symfony\AI\Platform\Message\MessageBag; use Symfony\AI\Platform\Message\UserMessage; use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; use Symfony\AI\Platform\PlatformInterface; use Symfony\AI\Platform\Result\RawResultInterface; use Symfony\AI\Platform\Result\ResultInterface; @@ -43,9 +44,8 @@ final class AgentTest extends TestCase public function testConstructorInitializesWithDefaults() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4o'); $this->assertInstanceOf(AgentInterface::class, $agent); } @@ -53,11 +53,10 @@ public function testConstructorInitializesWithDefaults() public function testConstructorInitializesWithProcessors() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $inputProcessor = $this->createMock(InputProcessorInterface::class); $outputProcessor = $this->createMock(OutputProcessorInterface::class); - $agent = new Agent($platform, $model, [$inputProcessor], [$outputProcessor]); + $agent = new Agent($platform, 'gpt-4o', [$inputProcessor], [$outputProcessor]); $this->assertInstanceOf(AgentInterface::class, $agent); } @@ -65,7 +64,6 @@ public function testConstructorInitializesWithProcessors() public function testConstructorSetsAgentOnAgentAwareProcessors() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $agentAwareProcessor = new class implements 
InputProcessorInterface, AgentAwareInterface { public ?AgentInterface $agent = null; @@ -80,7 +78,7 @@ public function setAgent(AgentInterface $agent): void } }; - $agent = new Agent($platform, $model, [$agentAwareProcessor]); + $agent = new Agent($platform, 'gpt-4o', [$agentAwareProcessor]); $this->assertSame($agent, $agentAwareProcessor->agent); } @@ -88,46 +86,51 @@ public function setAgent(AgentInterface $agent): void public function testConstructorThrowsExceptionForInvalidInputProcessor() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $invalidProcessor = new \stdClass(); $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage(\sprintf('Processor "stdClass" must implement "%s".', InputProcessorInterface::class)); /* @phpstan-ignore-next-line */ - new Agent($platform, $model, [$invalidProcessor]); + new Agent($platform, 'gpt-4o', [$invalidProcessor]); } public function testConstructorThrowsExceptionForInvalidOutputProcessor() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $invalidProcessor = new \stdClass(); $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage(\sprintf('Processor "stdClass" must implement "%s".', OutputProcessorInterface::class)); /* @phpstan-ignore-next-line */ - new Agent($platform, $model, [], [$invalidProcessor]); + new Agent($platform, 'gpt-4o', [], [$invalidProcessor]); } public function testAgentExposesHisModel() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); - $agent = new Agent($platform, $model); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + + $agent = new Agent($platform, 'gpt-4o'); - $this->assertSame($model, $agent->getModel()); + $this->assertEquals(new Model('gpt-4o', Capability::cases()), $agent->getModel()); } public function testCallProcessesInputThroughProcessors() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); + $modelName = 'gpt-4o'; $messages = new MessageBag(new UserMessage(new Text('Hello'))); $result = $this->createMock(ResultInterface::class); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + $inputProcessor = $this->createMock(InputProcessorInterface::class); $inputProcessor->expects($this->once()) ->method('processInput') @@ -138,10 +141,10 @@ public function testCallProcessesInputThroughProcessors() $platform->expects($this->once()) ->method('invoke') - ->with($model, $messages, []) + ->with($modelName, $messages, []) ->willReturn($response); - $agent = new Agent($platform, $model, [$inputProcessor]); + $agent = new Agent($platform, $modelName, [$inputProcessor]); $actualResult = $agent->call($messages); $this->assertSame($result, $actualResult); @@ -150,10 +153,14 @@ public function testCallProcessesInputThroughProcessors() public function testCallProcessesOutputThroughProcessors() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); + $modelName = 'gpt-4o'; $messages = new MessageBag(new UserMessage(new Text('Hello'))); $result = $this->createMock(ResultInterface::class); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + $outputProcessor = $this->createMock(OutputProcessorInterface::class); 
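For orientation, a minimal sketch of the wiring these tests exercise, assuming $platform is any PlatformInterface implementation (for example one produced by a bridge PlatformFactory): the agent is now constructed with a plain model name and resolves the capability-aware Model lazily through the platform's model catalog.

use Symfony\AI\Agent\Agent;
use Symfony\AI\Platform\Message\Message;
use Symfony\AI\Platform\Message\MessageBag;

// $platform is assumed to exist; its ModelCatalog decides which capabilities
// the name 'gpt-4o' maps to (DynamicModelCatalog simply grants all of them).
$agent = new Agent($platform, 'gpt-4o');

// getModel() delegates to $platform->getModelCatalog()->getModel('gpt-4o')
// and returns a Model value object rather than an injected instance.
$model = $agent->getModel();
echo $model->getName(); // 'gpt-4o'

// call() is unchanged from the caller's perspective; the model name string is
// forwarded to $platform->invoke() together with the per-call options.
$result = $agent->call(
    new MessageBag(Message::ofUser('Hello!')),
    ['temperature' => 0.7],
);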
$outputProcessor->expects($this->once()) ->method('processOutput') @@ -164,10 +171,10 @@ public function testCallProcessesOutputThroughProcessors() $platform->expects($this->once()) ->method('invoke') - ->with($model, $messages, []) + ->with($modelName, $messages, []) ->willReturn($response); - $agent = new Agent($platform, $model, [], [$outputProcessor]); + $agent = new Agent($platform, $modelName, [], [$outputProcessor]); $actualResult = $agent->call($messages); $this->assertSame($result, $actualResult); @@ -176,57 +183,68 @@ public function testCallProcessesOutputThroughProcessors() public function testCallThrowsExceptionForAudioInputWithoutSupport() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Audio('audio-data', 'audio/mp3'))); - $model->expects($this->once()) - ->method('supports') - ->with(Capability::INPUT_AUDIO) - ->willReturn(false); + $modelCatalog = $this->createMock(\Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface::class); + $model = new Model('gpt-4', [Capability::INPUT_TEXT]); // Model without INPUT_AUDIO capability + + $modelCatalog->expects($this->once()) + ->method('getModel') + ->with('gpt-4') + ->willReturn($model); + + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn($modelCatalog); $this->expectException(MissingModelSupportException::class); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $agent->call($messages); } public function testCallThrowsExceptionForImageInputWithoutSupport() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Image('image-data', 'image/png'))); - $model->expects($this->once()) - ->method('supports') - ->with(Capability::INPUT_IMAGE) - ->willReturn(false); + $modelCatalog = $this->createMock(\Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface::class); + $model = new Model('gpt-4', [Capability::INPUT_TEXT]); // Model without INPUT_IMAGE capability + + $modelCatalog->expects($this->once()) + ->method('getModel') + ->with('gpt-4') + ->willReturn($model); + + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn($modelCatalog); $this->expectException(MissingModelSupportException::class); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $agent->call($messages); } public function testCallAllowsAudioInputWithSupport() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Audio('audio-data', 'audio/mp3'))); $result = $this->createMock(ResultInterface::class); - $model->expects($this->once()) - ->method('supports') - ->with(Capability::INPUT_AUDIO) - ->willReturn(true); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); $rawResult = $this->createMock(RawResultInterface::class); $response = new ResultPromise(fn () => $result, $rawResult, []); $platform->expects($this->once()) ->method('invoke') + ->with('gpt-4', $messages, []) ->willReturn($response); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $actualResult = $agent->call($messages); $this->assertSame($result, $actualResult); @@ -235,22 +253,22 @@ public function testCallAllowsAudioInputWithSupport() public function testCallAllowsImageInputWithSupport() { $platform = 
$this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Image('image-data', 'image/png'))); $result = $this->createMock(ResultInterface::class); - $model->expects($this->once()) - ->method('supports') - ->with(Capability::INPUT_IMAGE) - ->willReturn(true); + + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); $rawResult = $this->createMock(RawResultInterface::class); $response = new ResultPromise(fn () => $result, $rawResult, []); $platform->expects($this->once()) ->method('invoke') + ->with('gpt-4', $messages, []) ->willReturn($response); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $actualResult = $agent->call($messages); $this->assertSame($result, $actualResult); @@ -259,10 +277,13 @@ public function testCallAllowsImageInputWithSupport() public function testCallHandlesClientException() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Text('Hello'))); $logger = $this->createMock(LoggerInterface::class); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + $httpResponse = $this->createMock(HttpResponseInterface::class); $httpResponse->expects($this->once()) ->method('toArray') @@ -295,16 +316,19 @@ public function getResponse(): HttpResponseInterface $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage('Client error'); - $agent = new Agent($platform, $model, logger: $logger); + $agent = new Agent($platform, 'gpt-4', logger: $logger); $agent->call($messages); } public function testCallHandlesClientExceptionWithEmptyMessage() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Text('Hello'))); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + $httpResponse = $this->createMock(HttpResponseInterface::class); $httpResponse->expects($this->once()) ->method('toArray') @@ -333,16 +357,19 @@ public function getResponse(): HttpResponseInterface $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage('Invalid request to model or platform'); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $agent->call($messages); } public function testCallHandlesHttpException() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Text('Hello'))); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + $exception = $this->createMock(HttpExceptionInterface::class); $platform->expects($this->once()) @@ -352,14 +379,13 @@ public function testCallHandlesHttpException() $this->expectException(RuntimeException::class); $this->expectExceptionMessage('Failed to request model'); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $agent->call($messages); } public function testCallPassesOptionsToInvoke() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $messages = new MessageBag(new UserMessage(new Text('Hello'))); $options = ['temperature' => 0.7, 'max_tokens' => 100]; $result = 
$this->createMock(ResultInterface::class); @@ -367,12 +393,16 @@ public function testCallPassesOptionsToInvoke() $rawResult = $this->createMock(RawResultInterface::class); $response = new ResultPromise(fn () => $result, $rawResult, []); + $platform->expects($this->once()) + ->method('getModelCatalog') + ->willReturn(new DynamicModelCatalog()); + $platform->expects($this->once()) ->method('invoke') - ->with($model, $messages, $options) + ->with('gpt-4', $messages, $options) ->willReturn($response); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $actualResult = $agent->call($messages, $options); $this->assertSame($result, $actualResult); @@ -381,7 +411,6 @@ public function testCallPassesOptionsToInvoke() public function testConstructorAcceptsTraversableProcessors() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $inputProcessor = $this->createMock(InputProcessorInterface::class); $outputProcessor = $this->createMock(OutputProcessorInterface::class); @@ -389,7 +418,7 @@ public function testConstructorAcceptsTraversableProcessors() $inputProcessors = new \ArrayIterator([$inputProcessor]); $outputProcessors = new \ArrayIterator([$outputProcessor]); - $agent = new Agent($platform, $model, $inputProcessors, $outputProcessors); + $agent = new Agent($platform, 'gpt-4', $inputProcessors, $outputProcessors); $this->assertInstanceOf(AgentInterface::class, $agent); } @@ -397,9 +426,8 @@ public function testConstructorAcceptsTraversableProcessors() public function testGetNameReturnsDefaultName() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); - $agent = new Agent($platform, $model); + $agent = new Agent($platform, 'gpt-4'); $this->assertSame('agent', $agent->getName()); } @@ -407,10 +435,9 @@ public function testGetNameReturnsDefaultName() public function testGetNameReturnsProvidedName() { $platform = $this->createMock(PlatformInterface::class); - $model = $this->createMock(Model::class); $name = 'test'; - $agent = new Agent($platform, $model, [], [], $name); + $agent = new Agent($platform, 'gpt-4', [], [], $name); $this->assertSame($name, $agent->getName()); } diff --git a/src/agent/tests/InputProcessor/ModelOverrideInputProcessorTest.php b/src/agent/tests/InputProcessor/ModelOverrideInputProcessorTest.php index b6b741316..8201be4ff 100644 --- a/src/agent/tests/InputProcessor/ModelOverrideInputProcessorTest.php +++ b/src/agent/tests/InputProcessor/ModelOverrideInputProcessorTest.php @@ -15,8 +15,7 @@ use Symfony\AI\Agent\Exception\InvalidArgumentException; use Symfony\AI\Agent\Input; use Symfony\AI\Agent\InputProcessor\ModelOverrideInputProcessor; -use Symfony\AI\Platform\Bridge\Anthropic\Claude; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Message\MessageBag; use Symfony\AI\Platform\Model; @@ -24,25 +23,29 @@ final class ModelOverrideInputProcessorTest extends TestCase { public function testProcessInputWithValidModelOption() { - $gpt = new Gpt(Gpt::GPT_4O); - $claude = new Claude(Claude::SONNET_37); - $input = new Input($gpt, new MessageBag(), ['model' => $claude]); + $originalModel = new Model('gpt-4o-mini', [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT]); + $overrideModel = new Model('gpt-4o', [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT]); + + $input = new Input($originalModel, new MessageBag(), ['model' => $overrideModel]); $processor = new ModelOverrideInputProcessor(); 
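A short usage sketch of the behaviour verified here: the "model" option still expects a Model instance (the test further down asserts that non-Model values are rejected), and the processor swaps it into the Input before the platform is invoked. Both Model instances below are illustrative.

use Symfony\AI\Agent\Input;
use Symfony\AI\Agent\InputProcessor\ModelOverrideInputProcessor;
use Symfony\AI\Platform\Capability;
use Symfony\AI\Platform\Message\MessageBag;
use Symfony\AI\Platform\Model;

$input = new Input(
    new Model('gpt-4o-mini', [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT]),
    new MessageBag(),
    ['model' => new Model('gpt-4o', [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT])],
);

(new ModelOverrideInputProcessor())->processInput($input);

echo $input->model->getName(); // 'gpt-4o', the override replaced the resolved model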
$processor->processInput($input); - $this->assertSame($claude, $input->model); + $this->assertSame($overrideModel, $input->model); + $this->assertSame('gpt-4o', $input->model->getName()); } public function testProcessInputWithoutModelOption() { - $gpt = new Gpt(Gpt::GPT_4O); - $input = new Input($gpt, new MessageBag()); + $originalModel = new Model('gpt-4o-mini', [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT]); + + $input = new Input($originalModel, new MessageBag()); $processor = new ModelOverrideInputProcessor(); $processor->processInput($input); - $this->assertSame($gpt, $input->model); + $this->assertSame($originalModel, $input->model); + $this->assertSame('gpt-4o-mini', $input->model->getName()); } public function testProcessInputWithInvalidModelOption() @@ -50,9 +53,8 @@ public function testProcessInputWithInvalidModelOption() $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage(\sprintf('Option "model" must be an instance of "%s".', Model::class)); - $gpt = new Gpt(Gpt::GPT_4O); - $model = new MessageBag(); - $input = new Input($gpt, new MessageBag(), ['model' => $model]); + $originalModel = new Model('gpt-4o-mini', [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT]); + $input = new Input($originalModel, new MessageBag(), ['model' => new MessageBag()]); $processor = new ModelOverrideInputProcessor(); $processor->processInput($input); diff --git a/src/agent/tests/InputProcessor/SystemPromptInputProcessorTest.php b/src/agent/tests/InputProcessor/SystemPromptInputProcessorTest.php index 2925757d4..1b5bcfe05 100644 --- a/src/agent/tests/InputProcessor/SystemPromptInputProcessorTest.php +++ b/src/agent/tests/InputProcessor/SystemPromptInputProcessorTest.php @@ -34,7 +34,7 @@ public function testProcessInputAddsSystemMessageWhenNoneExists() { $processor = new SystemPromptInputProcessor('This is a system prompt'); - $input = new Input(new Gpt(Gpt::GPT_4O), new MessageBag(Message::ofUser('This is a user message'))); + $input = new Input(new Gpt('gpt-4o'), new MessageBag(Message::ofUser('This is a user message'))); $processor->processInput($input); $messages = $input->messages->getMessages(); @@ -52,7 +52,7 @@ public function testProcessInputDoesNotAddSystemMessageWhenOneExists() Message::forSystem('This is already a system prompt'), Message::ofUser('This is a user message'), ); - $input = new Input(new Gpt(Gpt::GPT_4O), $messages); + $input = new Input(new Gpt('gpt-4o'), $messages); $processor->processInput($input); $messages = $input->messages->getMessages(); @@ -79,7 +79,7 @@ public function execute(ToolCall $toolCall): mixed }, ); - $input = new Input(new Gpt(Gpt::GPT_4O), new MessageBag(Message::ofUser('This is a user message'))); + $input = new Input(new Gpt('gpt-4o'), new MessageBag(Message::ofUser('This is a user message'))); $processor->processInput($input); $messages = $input->messages->getMessages(); @@ -118,7 +118,7 @@ public function execute(ToolCall $toolCall): mixed $this->getTranslator(), ); - $input = new Input(new Gpt(Gpt::GPT_4O), new MessageBag(Message::ofUser('This is a user message'))); + $input = new Input(new Gpt('gpt-4o'), new MessageBag(Message::ofUser('This is a user message'))); $processor->processInput($input); $messages = $input->messages->getMessages(); @@ -160,7 +160,7 @@ public function execute(ToolCall $toolCall): mixed }, ); - $input = new Input(new Gpt(Gpt::GPT_4O), new MessageBag(Message::ofUser('This is a user message'))); + $input = new Input(new Gpt('gpt-4o'), new MessageBag(Message::ofUser('This is a user 
message'))); $processor->processInput($input); $messages = $input->messages->getMessages(); @@ -183,7 +183,7 @@ public function testWithTranslatedSystemPrompt() { $processor = new SystemPromptInputProcessor(new TranslatableMessage('This is a'), null, $this->getTranslator()); - $input = new Input(new Gpt(Gpt::GPT_4O), new MessageBag(Message::ofUser('This is a user message')), []); + $input = new Input(new Gpt('gpt-4o'), new MessageBag(Message::ofUser('This is a user message')), []); $processor->processInput($input); $messages = $input->messages->getMessages(); @@ -201,7 +201,7 @@ public function testWithTranslationDomainSystemPrompt() $this->getTranslator(), ); - $input = new Input(new Gpt(Gpt::GPT_4O), new MessageBag(), []); + $input = new Input(new Gpt('gpt-4o'), new MessageBag(), []); $processor->processInput($input); $messages = $input->messages->getMessages(); diff --git a/src/agent/tests/Memory/EmbeddingProviderTest.php b/src/agent/tests/Memory/EmbeddingProviderTest.php index 7df41e65c..5efed0fff 100644 --- a/src/agent/tests/Memory/EmbeddingProviderTest.php +++ b/src/agent/tests/Memory/EmbeddingProviderTest.php @@ -99,9 +99,15 @@ public function testItIsNotCreatingMemoryWhenNoVectorsFound() $this->createStub(RawResultInterface::class), ); + $embeddingModel = $this->createMock(Model::class); + $embeddingModel->expects($this->once()) + ->method('getName') + ->willReturn('text-embedding-3-small'); + $platform = $this->createMock(PlatformInterface::class); $platform->expects($this->once()) ->method('invoke') + ->with('text-embedding-3-small', 'Have we talked about the weather?') ->willReturn($resultPromise); $store = $this->createMock(StoreInterface::class); @@ -112,7 +118,7 @@ public function testItIsNotCreatingMemoryWhenNoVectorsFound() $embeddingProvider = new EmbeddingProvider( $platform, - $this->createStub(Model::class), + $embeddingModel, $store, ); @@ -133,9 +139,15 @@ public function testItIsCreatingMemoryWithFoundVectors() $this->createStub(RawResultInterface::class), ); + $embeddingModel = $this->createMock(Model::class); + $embeddingModel->expects($this->once()) + ->method('getName') + ->willReturn('text-embedding-3-small'); + $platform = $this->createMock(PlatformInterface::class); $platform->expects($this->once()) ->method('invoke') + ->with('text-embedding-3-small', 'Have we talked about the weather?') ->willReturn($resultPromise); $store = $this->createMock(StoreInterface::class); @@ -149,7 +161,7 @@ public function testItIsCreatingMemoryWithFoundVectors() $embeddingProvider = new EmbeddingProvider( $platform, - $this->createStub(Model::class), + $embeddingModel, $store, ); diff --git a/src/ai-bundle/config/options.php b/src/ai-bundle/config/options.php index b17cfe0ce..172b887cb 100644 --- a/src/ai-bundle/config/options.php +++ b/src/ai-bundle/config/options.php @@ -15,10 +15,11 @@ use MongoDB\Client as MongoDbClient; use Probots\Pinecone\Client as PineconeClient; use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; -use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\PlatformInterface; use Symfony\AI\Store\Document\VectorizerInterface; use Symfony\AI\Store\StoreInterface; +use Symfony\Component\Config\Definition\Exception\InvalidConfigurationException; use Symfony\Contracts\Translation\TranslatorInterface; return static function (DefinitionConfigurator $configurator): void { @@ -176,6 +177,32 @@ ->end() ->end() ->end() + ->arrayNode('model') + ->useAttributeAsKey('platform') + ->arrayPrototype() + 
->useAttributeAsKey('model_name') + ->normalizeKeys(false) + ->validate() + ->ifEmpty() + ->thenInvalid('Model name cannot be empty.') + ->end() + ->arrayPrototype() + ->children() + ->arrayNode('capabilities') + ->info('Array of capabilities that this model supports') + ->enumPrototype(Capability::class) + ->enumFqcn(Capability::class) + ->end() + ->defaultValue([]) + ->validate() + ->ifEmpty() + ->thenInvalid('At least one capability must be specified for each model.') + ->end() + ->end() + ->end() + ->end() + ->end() + ->end() ->arrayNode('agent') ->useAttributeAsKey('name') ->arrayPrototype() @@ -188,27 +215,60 @@ ->info('Enable tracking of token usage for the agent') ->defaultTrue() ->end() - ->arrayNode('model') - ->children() - ->stringNode('class') - ->isRequired() - ->validate() - ->ifTrue(function ($v) { - return !is_a($v, Model::class, true); - }) - ->thenInvalid(\sprintf('The model class "%%s" must extend %s.', Model::class)) - ->end() - ->end() - ->stringNode('name')->isRequired()->end() - ->arrayNode('options') - ->variablePrototype()->end() - ->end() + ->variableNode('model') + ->validate() + ->ifTrue(function ($v) { + return !\is_string($v) && (!\is_array($v) || !isset($v['name'])); + }) + ->thenInvalid('Model must be a string or an array with a "name" key.') ->end() ->validate() ->ifTrue(function ($v) { - return isset($v['name']) && str_contains($v['name'], '?') && !empty($v['options']); + // Check if both query parameters and options array are provided + if (\is_array($v) && isset($v['name']) && isset($v['options']) && [] !== $v['options']) { + return str_contains($v['name'], '?'); + } + + return false; + }) + ->thenInvalid('Cannot use both query parameters in model name and options array.') + ->end() + ->beforeNormalization() + ->always(function ($v) { + if (\is_string($v)) { + return $v; + } + + // It's an array with 'name' and optionally 'options' + $model = $v['name']; + $options = $v['options'] ?? []; + + // Parse query parameters from model name if present + if (str_contains($model, '?')) { + $parsed = parse_url($model); + $model = $parsed['path'] ?? ''; + + if ('' === $model) { + throw new InvalidConfigurationException('Model name cannot be empty.'); + } + + if (isset($parsed['query'])) { + // If options array is also provided, throw an error + if ([] !== $options) { + throw new InvalidConfigurationException('Cannot use both query parameters in model name and options array.'); + } + parse_str($parsed['query'], $existingOptions); + $options = $existingOptions; + } + } + + // Return model string with options as query parameters + if ([] === $options) { + return $model; + } + + return $model.'?'.http_build_query($options); }) - ->thenInvalid('Cannot specify both query parameters in model name and options array. 
Use either "model.name" with query parameters (e.g., "gpt-4o-mini?temperature=0.5") or separate "model.name" and "model.options".') ->end() ->end() ->booleanNode('structured_output')->defaultTrue()->end() @@ -535,27 +595,60 @@ ->info('Service name of platform') ->defaultValue(PlatformInterface::class) ->end() - ->arrayNode('model') - ->children() - ->stringNode('class') - ->isRequired() - ->validate() - ->ifTrue(function ($v) { - return !is_a($v, Model::class, true); - }) - ->thenInvalid(\sprintf('The model class "%%s" must extend %s.', Model::class)) - ->end() - ->end() - ->stringNode('name')->isRequired()->end() - ->arrayNode('options') - ->variablePrototype()->end() - ->end() + ->variableNode('model') + ->validate() + ->ifTrue(function ($v) { + return !\is_string($v) && (!\is_array($v) || !isset($v['name'])); + }) + ->thenInvalid('Model must be a string or an array with a "name" key.') ->end() ->validate() ->ifTrue(function ($v) { - return isset($v['name']) && str_contains($v['name'], '?') && !empty($v['options']); + // Check if both query parameters and options array are provided + if (\is_array($v) && isset($v['name']) && isset($v['options']) && [] !== $v['options']) { + return str_contains($v['name'], '?'); + } + + return false; + }) + ->thenInvalid('Cannot use both query parameters in model name and options array.') + ->end() + ->beforeNormalization() + ->always(function ($v) { + if (\is_string($v)) { + return $v; + } + + // It's an array with 'name' and optionally 'options' + $model = $v['name']; + $options = $v['options'] ?? []; + + // Parse query parameters from model name if present + if (str_contains($model, '?')) { + $parsed = parse_url($model); + $model = $parsed['path'] ?? ''; + + if ('' === $model) { + throw new InvalidConfigurationException('Model name cannot be empty.'); + } + + if (isset($parsed['query'])) { + // If options array is also provided, throw an error + if ([] !== $options) { + throw new InvalidConfigurationException('Cannot use both query parameters in model name and options array.'); + } + parse_str($parsed['query'], $existingOptions); + $options = $existingOptions; + } + } + + // Return model string with options as query parameters + if ([] === $options) { + return $model; + } + + return $model.'?'.http_build_query($options); }) - ->thenInvalid('Cannot specify both query parameters in model name and options array. 
Use either "model.name" with query parameters (e.g., "gpt-4o-mini?temperature=0.5") or separate "model.name" and "model.options".') ->end() ->end() ->end() diff --git a/src/ai-bundle/config/services.php b/src/ai-bundle/config/services.php index b1608d83a..2f4aa5e2c 100644 --- a/src/ai-bundle/config/services.php +++ b/src/ai-bundle/config/services.php @@ -25,19 +25,38 @@ use Symfony\AI\AiBundle\Profiler\DataCollector; use Symfony\AI\AiBundle\Profiler\TraceableToolbox; use Symfony\AI\AiBundle\Security\EventListener\IsGrantedToolAttributeListener; +use Symfony\AI\Platform\Bridge\AiMlApi\ModelCatalog as AiMlApiModelCatalog; use Symfony\AI\Platform\Bridge\Anthropic\Contract\AnthropicContract; +use Symfony\AI\Platform\Bridge\Anthropic\ModelCatalog as AnthropicModelCatalog; use Symfony\AI\Platform\Bridge\Anthropic\TokenOutputProcessor as AnthropicTokenOutputProcessor; +use Symfony\AI\Platform\Bridge\Azure\Meta\ModelCatalog as AzureMetaModelCatalog; +use Symfony\AI\Platform\Bridge\Azure\OpenAi\ModelCatalog as AzureOpenAiModelCatalog; +use Symfony\AI\Platform\Bridge\Cerebras\ModelCatalog as CerebrasModelCatalog; +use Symfony\AI\Platform\Bridge\DockerModelRunner\ModelCatalog as DockerModelRunnerModelCatalog; +use Symfony\AI\Platform\Bridge\ElevenLabs\ModelCatalog as ElevenLabsModelCatalog; use Symfony\AI\Platform\Bridge\Gemini\Contract\GeminiContract; +use Symfony\AI\Platform\Bridge\Gemini\ModelCatalog as GeminiModelCatalog; use Symfony\AI\Platform\Bridge\Gemini\TokenOutputProcessor as GeminiTokenOutputProcessor; +use Symfony\AI\Platform\Bridge\HuggingFace\ModelCatalog as HuggingFaceModelCatalog; +use Symfony\AI\Platform\Bridge\LmStudio\ModelCatalog as LmStudioModelCatalog; +use Symfony\AI\Platform\Bridge\Mistral\ModelCatalog as MistralModelCatalog; use Symfony\AI\Platform\Bridge\Mistral\TokenOutputProcessor as MistralTokenOutputProcessor; use Symfony\AI\Platform\Bridge\Ollama\Contract\OllamaContract; +use Symfony\AI\Platform\Bridge\Ollama\ModelCatalog as OllamaModelCatalog; use Symfony\AI\Platform\Bridge\OpenAi\Contract\OpenAiContract; +use Symfony\AI\Platform\Bridge\OpenAi\ModelCatalog as OpenAiModelCatalog; use Symfony\AI\Platform\Bridge\OpenAi\TokenOutputProcessor as OpenAiTokenOutputProcessor; +use Symfony\AI\Platform\Bridge\OpenRouter\ModelCatalog as OpenRouterModelCatalog; use Symfony\AI\Platform\Bridge\Perplexity\Contract\PerplexityContract; +use Symfony\AI\Platform\Bridge\Perplexity\ModelCatalog as PerplexityModelCatalog; use Symfony\AI\Platform\Bridge\Perplexity\SearchResultProcessor as PerplexitySearchResultProcessor; use Symfony\AI\Platform\Bridge\Perplexity\TokenOutputProcessor as PerplexityTokenOutputProcessor; +use Symfony\AI\Platform\Bridge\Replicate\ModelCatalog as ReplicateModelCatalog; +use Symfony\AI\Platform\Bridge\Scaleway\ModelCatalog as ScalewayModelCatalog; use Symfony\AI\Platform\Bridge\VertexAi\Contract\GeminiContract as VertexAiGeminiContract; +use Symfony\AI\Platform\Bridge\VertexAi\ModelCatalog as VertexAiModelCatalog; use Symfony\AI\Platform\Bridge\VertexAi\TokenOutputProcessor as VertexAiTokenOutputProcessor; +use Symfony\AI\Platform\Bridge\Voyage\ModelCatalog as VoyageModelCatalog; use Symfony\AI\Platform\Contract; use Symfony\AI\Platform\Contract\JsonSchema\DescriptionParser; use Symfony\AI\Platform\Contract\JsonSchema\Factory as SchemaFactory; @@ -47,6 +66,7 @@ return static function (ContainerConfigurator $container): void { $container->services() + // contract ->set('ai.platform.contract.default', Contract::class) ->factory([Contract::class, 'create']) 
->set('ai.platform.contract.openai', Contract::class) @@ -61,6 +81,28 @@ ->factory([OllamaContract::class, 'create']) ->set('ai.platform.contract.perplexity', Contract::class) ->factory([PerplexityContract::class, 'create']) + + // model catalog + ->set('ai.platform.model_catalog.aimlapi', AiMlApiModelCatalog::class) + ->set('ai.platform.model_catalog.anthropic', AnthropicModelCatalog::class) + ->set('ai.platform.model_catalog.azure.meta', AzureMetaModelCatalog::class) + ->set('ai.platform.model_catalog.azure.openai', AzureOpenAiModelCatalog::class) + ->set('ai.platform.model_catalog.cerebras', CerebrasModelCatalog::class) + ->set('ai.platform.model_catalog.dockermodelrunner', DockerModelRunnerModelCatalog::class) + ->set('ai.platform.model_catalog.elevenlabs', ElevenLabsModelCatalog::class) + ->set('ai.platform.model_catalog.gemini', GeminiModelCatalog::class) + ->set('ai.platform.model_catalog.huggingface', HuggingFaceModelCatalog::class) + ->set('ai.platform.model_catalog.lmstudio', LmStudioModelCatalog::class) + ->set('ai.platform.model_catalog.mistral', MistralModelCatalog::class) + ->set('ai.platform.model_catalog.ollama', OllamaModelCatalog::class) + ->set('ai.platform.model_catalog.openai', OpenAiModelCatalog::class) + ->set('ai.platform.model_catalog.openrouter', OpenRouterModelCatalog::class) + ->set('ai.platform.model_catalog.perplexity', PerplexityModelCatalog::class) + ->set('ai.platform.model_catalog.replicate', ReplicateModelCatalog::class) + ->set('ai.platform.model_catalog.scaleway', ScalewayModelCatalog::class) + ->set('ai.platform.model_catalog.vertexai.gemini', VertexAiModelCatalog::class) + ->set('ai.platform.model_catalog.voyage', VoyageModelCatalog::class) + // structured output ->set('ai.agent.response_format_factory', ResponseFormatFactory::class) ->args([ diff --git a/src/ai-bundle/src/AiBundle.php b/src/ai-bundle/src/AiBundle.php index 7ecbf1208..c5d74a29c 100644 --- a/src/ai-bundle/src/AiBundle.php +++ b/src/ai-bundle/src/AiBundle.php @@ -48,7 +48,6 @@ use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory as VertexAiPlatformFactory; use Symfony\AI\Platform\Bridge\Voyage\PlatformFactory as VoyagePlatformFactory; use Symfony\AI\Platform\Exception\RuntimeException; -use Symfony\AI\Platform\Model; use Symfony\AI\Platform\ModelClientInterface; use Symfony\AI\Platform\Platform; use Symfony\AI\Platform\PlatformInterface; @@ -226,6 +225,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['api_key'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.anthropic'), + new Reference('ai.platform.model_catalog.anthropic'), ]) ->addTag('ai.platform'); @@ -248,6 +248,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $config['api_key'], new Reference($config['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.openai'), + new Reference('ai.platform.model_catalog.azure.openai'), ]) ->addTag('ai.platform'); @@ -268,6 +269,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['host'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.default'), + new Reference('ai.platform.model_catalog.elevenlabs'), ]) ->addTag('ai.platform'); @@ -286,6 +288,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['api_key'], new 
Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.google'), + new Reference('ai.platform.model_catalog.gemini'), ]) ->addTag('ai.platform'); @@ -324,7 +327,8 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['location'], $platform['project_id'], $httpClient, - new Reference('ai.platform.contract.vertexai.gemini', ContainerInterface::NULL_ON_INVALID_REFERENCE), + new Reference('ai.platform.contract.vertexai', ContainerInterface::NULL_ON_INVALID_REFERENCE), + new Reference('ai.platform.model_catalog.vertexai.gemini'), ]) ->addTag('ai.platform'); @@ -362,6 +366,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['api_key'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.default'), + new Reference('ai.platform.model_catalog.openrouter'), ]) ->addTag('ai.platform'); @@ -380,6 +385,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['api_key'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.default'), + new Reference('ai.platform.model_catalog.mistral'), ]) ->addTag('ai.platform'); @@ -398,6 +404,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['host_url'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.default'), + new Reference('ai.platform.model_catalog.lmstudio'), ]) ->addTag('ai.platform'); @@ -416,6 +423,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['host_url'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.ollama'), + new Reference('ai.platform.model_catalog.ollama'), ]) ->addTag('ai.platform'); @@ -433,6 +441,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB ->setArguments([ $platform['api_key'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), + new Reference('ai.platform.model_catalog.cerebras'), ]) ->addTag('ai.platform'); @@ -450,6 +459,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB ->setArguments([ $platform['api_key'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), + new Reference('ai.platform.model_catalog.voyage'), ]) ->addTag('ai.platform'); @@ -486,6 +496,7 @@ private function processPlatformConfig(string $type, array $platform, ContainerB $platform['host_url'], new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE), new Reference('ai.platform.contract.default'), + new Reference('ai.platform.model_catalog.dockermodelrunner'), ]) ->addTag('ai.platform'); @@ -503,6 +514,8 @@ private function processPlatformConfig(string $type, array $platform, ContainerB ->setArguments([ $platform['api_key'], new Reference('http_client', ContainerInterface::NULL_ON_INVALID_REFERENCE), + new Reference('ai.platform.contract.default'), + new Reference('ai.platform.model_catalog.scaleway'), ]) ->addTag('ai.platform'); @@ -519,33 +532,12 @@ private function processPlatformConfig(string $type, array $platform, ContainerB */ private function processAgentConfig(string $name, array $config, ContainerBuilder $container): void 
{ - // MODEL - ['class' => $modelClass, 'name' => $modelName, 'options' => $options] = $config['model']; - - // Parse query parameters from model name if present - if (str_contains((string) $modelName, '?')) { - $parsed = parse_url($modelName); - $modelName = $parsed['path'] ?? ''; - - if (isset($parsed['query'])) { - parse_str($parsed['query'], $options); - } - } - - $modelDefinition = new Definition($modelClass); - $modelDefinition->setArgument(0, $modelName); - if ([] !== $options) { - $modelDefinition->setArgument(1, $options); - } - $modelDefinition->addTag('ai.model.language_model'); - $container->setDefinition('ai.agent.'.$name.'.model', $modelDefinition); - // AGENT $agentId = 'ai.agent.'.$name; $agentDefinition = (new Definition(Agent::class)) ->addTag('ai.agent', ['name' => $name]) ->setArgument(0, new Reference($config['platform'])) - ->setArgument(1, new Reference('ai.agent.'.$name.'.model')); + ->setArgument(1, $config['model']); // TOOL & PROCESSOR if ($config['tools']['enabled']) { @@ -1154,30 +1146,9 @@ private function processStoreConfig(string $type, array $stores, ContainerBuilde */ private function processVectorizerConfig(string $name, array $config, ContainerBuilder $container): void { - ['class' => $modelClass, 'name' => $modelName, 'options' => $options] = $config['model']; - - // Parse query parameters from model name if present - if (str_contains((string) $modelName, '?')) { - $parsed = parse_url($modelName); - $modelName = $parsed['path'] ?? ''; - - if (isset($parsed['query'])) { - parse_str($parsed['query'], $options); - } - } - - $modelDefinition = (new Definition((string) $modelClass)); - $modelDefinition->setArgument(0, $modelName); - if ([] !== $options) { - $modelDefinition->setArgument(1, $options); - } - - $modelDefinition->addTag('ai.model.embeddings_model'); - $container->setDefinition('ai.vectorizer.'.$name.'.model', $modelDefinition); - $vectorizerDefinition = new Definition(Vectorizer::class, [ new Reference($config['platform']), - new Reference('ai.vectorizer.'.$name.'.model'), + $config['model'], new Reference('logger', ContainerInterface::IGNORE_ON_INVALID_REFERENCE), ]); $vectorizerDefinition->addTag('ai.vectorizer', ['name' => $name]); diff --git a/src/ai-bundle/src/Profiler/TraceablePlatform.php b/src/ai-bundle/src/Profiler/TraceablePlatform.php index f595d45e3..4d49f4dae 100644 --- a/src/ai-bundle/src/Profiler/TraceablePlatform.php +++ b/src/ai-bundle/src/Profiler/TraceablePlatform.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Message\Content\File; use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\PlatformInterface; use Symfony\AI\Platform\Result\ResultInterface; use Symfony\AI\Platform\Result\ResultPromise; @@ -45,7 +46,7 @@ public function __construct( $this->resultCache = new \WeakMap(); } - public function invoke(Model $model, array|string|object $input, array $options = []): ResultPromise + public function invoke(string $model, array|string|object $input, array $options = []): ResultPromise { $resultPromise = $this->platform->invoke($model, $input, $options); @@ -68,6 +69,11 @@ public function invoke(Model $model, array|string|object $input, array $options return $resultPromise; } + public function getModelCatalog(): ModelCatalogInterface + { + return $this->platform->getModelCatalog(); + } + private function createTraceableStreamResult(\Generator $originalStream): StreamResult { return $result = new StreamResult((function () use (&$result, $originalStream) { diff --git 
a/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php b/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php index 2d23c09cc..ae0c2b1df 100644 --- a/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php +++ b/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php @@ -19,10 +19,6 @@ use Symfony\AI\Agent\Memory\MemoryInputProcessor; use Symfony\AI\Agent\Memory\StaticMemoryProvider; use Symfony\AI\AiBundle\AiBundle; -use Symfony\AI\Platform\Bridge\Anthropic\Claude; -use Symfony\AI\Platform\Bridge\Mistral\Embeddings as MistralEmbeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; -use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Store\Document\Filter\TextContainsFilter; use Symfony\AI\Store\Document\Loader\InMemoryLoader; use Symfony\AI\Store\Document\Transformer\TextTrimTransformer; @@ -48,7 +44,7 @@ public function testStoreCommandsArentDefinedWithoutStore() 'ai' => [ 'agent' => [ 'my_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', ], ], ], @@ -85,7 +81,7 @@ public function testInjectionAgentAliasIsRegistered() 'ai' => [ 'agent' => [ 'my_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', ], ], ], @@ -132,7 +128,7 @@ public function testAgentHasTag() 'ai' => [ 'agent' => [ 'my_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', ], ], ], @@ -147,7 +143,7 @@ public function testAgentNameIsSetFromConfigKey() 'ai' => [ 'agent' => [ 'my_custom_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', ], ], ], @@ -176,7 +172,7 @@ public function testFaultTolerantAgentSpecificToolbox(bool $enabled) 'ai' => [ 'agent' => [ 'my_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => [ ['service' => 'some_service', 'description' => 'Some tool'], ], @@ -197,7 +193,7 @@ public function testFaultTolerantDefaultToolbox(bool $enabled) 'ai' => [ 'agent' => [ 'my_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => true, 'fault_tolerant_toolbox' => $enabled, ], @@ -214,7 +210,7 @@ public function testAgentsCanBeRegisteredAsTools() 'ai' => [ 'agent' => [ 'main_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => [ ['agent' => 'another_agent', 'description' => 'Agent tool with implicit name'], ['agent' => 'another_agent', 'name' => 'another_agent_instance', 'description' => 'Agent tool with explicit name'], @@ -235,7 +231,7 @@ public function testAgentsAsToolsCannotDefineService() 'ai' => [ 'agent' => [ 'main_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => [['agent' => 'another_agent', 'service' => 'foo_bar', 'description' => 'Agent with service']], ], ], @@ -385,7 +381,7 @@ public function testConfigurationWithUseAttributeAsKeyWorksWithoutNormalizeKeys( ], 'agent' => [ 'My-Agent_Name.v2' => [ // Mixed case and special chars in key - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', ], ], 'store' => [ @@ -417,7 +413,7 @@ public function testProcessorTagsUseFullAgentId() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => [ ['service' => 'some_tool', 'description' => 'Test tool'], ], @@ -469,14 +465,14 @@ public function testMultipleAgentsWithProcessors() 'ai' => [ 'agent' => [ 'first_agent' => [ - 'model' => ['class' => Gpt::class, 
'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => [ ['service' => 'tool_one', 'description' => 'Tool for first agent'], ], 'prompt' => 'First agent prompt', ], 'second_agent' => [ - 'model' => ['class' => Claude::class, 'name' => 'claude-3-opus-20240229'], + 'model' => 'claude-3-opus-20240229', 'tools' => [ ['service' => 'tool_two', 'description' => 'Tool for second agent'], ], @@ -520,7 +516,7 @@ public function testDefaultToolboxProcessorTags() 'ai' => [ 'agent' => [ 'agent_with_default_toolbox' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'tools' => true, ], ], @@ -569,7 +565,7 @@ public function testTokenUsageProcessorTags() 'agent' => [ 'tracked_agent' => [ 'platform' => 'ai.platform.openai', - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'track_token_usage' => true, ], ], @@ -690,7 +686,7 @@ public function testSystemPromptWithArrayStructure() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'text' => 'You are a helpful assistant.', 'enable_translation' => true, @@ -719,7 +715,7 @@ public function testSystemPromptWithIncludeToolsEnabled() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'text' => 'You are a helpful assistant.', 'include_tools' => true, @@ -748,7 +744,7 @@ public function testSystemPromptWithOnlyTextKey() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'text' => 'You are a helpful assistant.', ], @@ -775,7 +771,7 @@ public function testAgentWithoutSystemPrompt() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', ], ], ], @@ -792,7 +788,7 @@ public function testValidSystemPromptCreatesProcessor() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'text' => 'Valid prompt', 'include_tools' => true, @@ -824,7 +820,7 @@ public function testEmptyTextInArrayThrowsException() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'text' => '', ], @@ -844,7 +840,7 @@ public function testSystemPromptArrayWithoutTextKeyThrowsException() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'include_tools' => true, ], @@ -861,7 +857,7 @@ public function testSystemPromptWithStringFormat() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => 'You are a helpful assistant.', 'tools' => [ ['service' => 'some_tool', 'description' => 'Test tool'], @@ -886,7 +882,7 @@ public function testMemoryProviderConfiguration() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'Static memory for testing', 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -921,7 +917,7 @@ public function testAgentWithoutMemoryConfiguration() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => [ 'text' => 'You are a helpful assistant.', ], @@ -940,7 +936,7 @@ public function testMemoryWithNullValueDoesNotCreateProcessor() 'ai' => [ 'agent' => 
[ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => null, 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -960,7 +956,7 @@ public function testMemoryWithSystemPromptAndTools() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'conversation_memory_service', 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -1003,7 +999,7 @@ public function testMemoryWithStringPromptFormat() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'prompt' => 'You are a helpful assistant.', // memory cannot be configured with string format ], @@ -1025,20 +1021,20 @@ public function testMultipleAgentsWithDifferentMemoryConfigurations() 'ai' => [ 'agent' => [ 'agent_with_memory' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'first_memory_service', 'prompt' => [ 'text' => 'Agent with memory.', ], ], 'agent_without_memory' => [ - 'model' => ['class' => Claude::class, 'name' => 'claude-3-opus-20240229'], + 'model' => 'claude-3-opus-20240229', 'prompt' => [ 'text' => 'Agent without memory.', ], ], 'agent_with_different_memory' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'second_memory_service', 'prompt' => [ 'text' => 'Agent with different memory.', @@ -1080,7 +1076,7 @@ public function testMemoryProcessorUsesCorrectClass() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'my_memory_service', 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -1118,7 +1114,7 @@ public function testEmptyStringMemoryConfigurationThrowsException() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => '', 'prompt' => [ 'text' => 'Test prompt', @@ -1139,7 +1135,7 @@ public function testMemoryArrayConfigurationWithoutServiceKeyThrowsException() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => ['invalid' => 'value'], 'prompt' => [ 'text' => 'Test prompt', @@ -1160,7 +1156,7 @@ public function testMemoryArrayConfigurationWithEmptyServiceThrowsException() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => ['service' => ''], 'prompt' => [ 'text' => 'Test prompt', @@ -1178,7 +1174,7 @@ public function testMemoryServiceConfigurationWorksCorrectly() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => ['service' => 'my_custom_memory_service'], 'prompt' => [ 'text' => 'Test prompt', @@ -1205,7 +1201,7 @@ public function testMemoryProcessorPriorityOrdering() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'test_memory', 'prompt' => [ 'text' => 'Test prompt', @@ -1234,7 +1230,7 @@ public function testMemoryProcessorIntegration() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'my_memory_service', 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -1281,7 +1277,7 @@ public function testMemoryWithExistingServiceUsesServiceReference() 'ai' => [ 'agent' 
=> [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => ['service' => 'existing_memory_service'], // New array syntax for service 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -1308,7 +1304,7 @@ public function testMemoryWithNonExistingServiceCreatesStaticMemoryProvider() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => 'This is static memory content', // This is not a service 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -1353,7 +1349,7 @@ public function testMemoryWithServiceAliasUsesAlias() 'ai' => [ 'agent' => [ 'test_agent' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => ['service' => 'memory_alias'], // Use new array syntax for service alias 'prompt' => [ 'text' => 'You are a helpful assistant.', @@ -1389,14 +1385,14 @@ public function testDifferentAgentsCanUseDifferentMemoryTypes() 'ai' => [ 'agent' => [ 'agent_with_service' => [ - 'model' => ['class' => Gpt::class, 'name' => 'gpt-4'], + 'model' => 'gpt-4', 'memory' => ['service' => 'dynamic_memory_service'], // Use new array syntax for service 'prompt' => [ 'text' => 'Agent with service.', ], ], 'agent_with_static' => [ - 'model' => ['class' => Claude::class, 'name' => 'claude-3-opus-20240229'], + 'model' => 'claude-3-opus-20240229', 'memory' => 'Static memory context for this agent', // Static content 'prompt' => [ 'text' => 'Agent with static memory.', @@ -1433,7 +1429,6 @@ public function testModelConfigurationWithQueryParameters() 'agent' => [ 'test' => [ 'model' => [ - 'class' => Gpt::class, 'name' => 'gpt-4o-mini?temperature=0.5&max_tokens=2000', ], ], @@ -1441,9 +1436,8 @@ public function testModelConfigurationWithQueryParameters() ], ]); - $modelDefinition = $container->getDefinition('ai.agent.test.model'); - $this->assertSame('gpt-4o-mini', $modelDefinition->getArgument(0)); - $this->assertEquals(['temperature' => '0.5', 'max_tokens' => '2000'], $modelDefinition->getArgument(1)); + $agentDefinition = $container->getDefinition('ai.agent.test'); + $this->assertSame('gpt-4o-mini?temperature=0.5&max_tokens=2000', $agentDefinition->getArgument(1)); } #[TestDox('Model configuration with separate options array works correctly')] @@ -1454,7 +1448,6 @@ public function testModelConfigurationWithSeparateOptions() 'agent' => [ 'test' => [ 'model' => [ - 'class' => Gpt::class, 'name' => 'gpt-4o-mini', 'options' => [ 'temperature' => 0.7, @@ -1466,26 +1459,26 @@ public function testModelConfigurationWithSeparateOptions() ], ]); - $modelDefinition = $container->getDefinition('ai.agent.test.model'); - $this->assertSame('gpt-4o-mini', $modelDefinition->getArgument(0)); - $this->assertEquals(['temperature' => 0.7, 'max_tokens' => 1500], $modelDefinition->getArgument(1)); + $agentDefinition = $container->getDefinition('ai.agent.test'); + $this->assertSame('gpt-4o-mini?temperature=0.7&max_tokens=1500', $agentDefinition->getArgument(1)); } - #[TestDox('Model configuration with conflicting query parameters and options throws exception')] + #[TestDox('Model configuration throws exception when using both query parameters and options array')] public function testModelConfigurationConflictThrowsException() { + // Should throw exception when both query parameters and options array are provided $this->expectException(InvalidConfigurationException::class); - $this->expectExceptionMessage('Cannot specify both query parameters in model name 
and options array'); + $this->expectExceptionMessage('Cannot use both query parameters in model name and options array'); - $this->buildContainer([ + $container = $this->buildContainer([ 'ai' => [ 'agent' => [ 'test' => [ 'model' => [ - 'class' => Gpt::class, - 'name' => 'gpt-4o-mini?temperature=0.5', + 'name' => 'gpt-4o-mini?temperature=0.5&max_tokens=1000', 'options' => [ 'temperature' => 0.7, + 'stream' => true, ], ], ], @@ -1502,7 +1495,6 @@ public function testModelConfigurationTypeConversion() 'agent' => [ 'test' => [ 'model' => [ - 'class' => Gpt::class, 'name' => 'gpt-4o-mini?temperature=0.5&max_tokens=2000&stream=true&presence_penalty=0', ], ], @@ -1510,14 +1502,9 @@ public function testModelConfigurationTypeConversion() ], ]); - $modelDefinition = $container->getDefinition('ai.agent.test.model'); - $this->assertSame('gpt-4o-mini', $modelDefinition->getArgument(0)); - - $options = $modelDefinition->getArgument(1); - $this->assertSame('0.5', $options['temperature']); // string - $this->assertSame('2000', $options['max_tokens']); // string - $this->assertSame('true', $options['stream']); // string - $this->assertSame('0', $options['presence_penalty']); // string + $agentDefinition = $container->getDefinition('ai.agent.test'); + // Query parameters are maintained as strings when parsed from URL + $this->assertSame('gpt-4o-mini?temperature=0.5&max_tokens=2000&stream=true&presence_penalty=0', $agentDefinition->getArgument(1)); } #[TestDox('Vectorizer model configuration with query parameters works correctly')] @@ -1528,7 +1515,6 @@ public function testVectorizerModelConfigurationWithQueryParameters() 'vectorizer' => [ 'test' => [ 'model' => [ - 'class' => Gpt::class, 'name' => 'text-embedding-3-small?dimensions=512', ], ], @@ -1536,23 +1522,22 @@ public function testVectorizerModelConfigurationWithQueryParameters() ], ]); - $modelDefinition = $container->getDefinition('ai.vectorizer.test.model'); - $this->assertSame('text-embedding-3-small', $modelDefinition->getArgument(0)); - $this->assertEquals(['dimensions' => '512'], $modelDefinition->getArgument(1)); + $vectorizerDefinition = $container->getDefinition('ai.vectorizer.test'); + $this->assertSame('text-embedding-3-small?dimensions=512', $vectorizerDefinition->getArgument(1)); } - #[TestDox('Vectorizer model configuration with conflicting parameters throws exception')] + #[TestDox('Vectorizer model configuration throws exception when using both query parameters and options array')] public function testVectorizerModelConfigurationConflictThrowsException() { + // Should throw exception when both query parameters and options array are provided $this->expectException(InvalidConfigurationException::class); - $this->expectExceptionMessage('Cannot specify both query parameters in model name and options array'); + $this->expectExceptionMessage('Cannot use both query parameters in model name and options array'); - $this->buildContainer([ + $container = $this->buildContainer([ 'ai' => [ 'vectorizer' => [ 'test' => [ 'model' => [ - 'class' => Gpt::class, 'name' => 'text-embedding-3-small?dimensions=512', 'options' => [ 'dimensions' => 1536, @@ -1572,7 +1557,6 @@ public function testVectorizerConfiguration() 'my_vectorizer' => [ 'platform' => 'my_platform_service_id', 'model' => [ - 'class' => Embeddings::class, 'name' => 'text-embedding-3-small', 'options' => ['dimension' => 512], ], @@ -1582,15 +1566,13 @@ public function testVectorizerConfiguration() ]); $this->assertTrue($container->hasDefinition('ai.vectorizer.my_vectorizer')); - 
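The assertions above expect the "name?query" model string to be stored verbatim on the agent and vectorizer definitions, with the query values kept as strings. As a rough sketch of how such a string can later be split into a model name and an options array (the helper name is hypothetical and not the bundle's actual parser):

    <?php

    // Hypothetical helper, for illustration only: splits 'gpt-4o-mini?temperature=0.5'
    // into a model name and an options array. Query values remain strings, as the
    // type-conversion test above documents.
    function splitModelString(string $model): array
    {
        [$name, $query] = array_pad(explode('?', $model, 2), 2, null);

        $options = [];
        if (null !== $query) {
            parse_str($query, $options);
        }

        return [$name, $options];
    }

    [$name, $options] = splitModelString('gpt-4o-mini?temperature=0.5&max_tokens=2000');
    // $name === 'gpt-4o-mini'
    // $options === ['temperature' => '0.5', 'max_tokens' => '2000']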
$this->assertTrue($container->hasDefinition('ai.vectorizer.my_vectorizer.model')); $vectorizerDefinition = $container->getDefinition('ai.vectorizer.my_vectorizer'); $this->assertSame(Vectorizer::class, $vectorizerDefinition->getClass()); $this->assertTrue($vectorizerDefinition->hasTag('ai.vectorizer')); - $modelDefinition = $container->getDefinition('ai.vectorizer.my_vectorizer.model'); - $this->assertSame(Embeddings::class, $modelDefinition->getClass()); - $this->assertTrue($modelDefinition->hasTag('ai.model.embeddings_model')); + // Check that model is passed as a string with options as query params + $this->assertSame('text-embedding-3-small?dimension=512', $vectorizerDefinition->getArgument(1)); } public function testVectorizerWithLoggerInjection() @@ -1600,10 +1582,7 @@ public function testVectorizerWithLoggerInjection() 'vectorizer' => [ 'my_vectorizer' => [ 'platform' => 'my_platform_service_id', - 'model' => [ - 'class' => Embeddings::class, - 'name' => 'text-embedding-3-small', - ], + 'model' => 'text-embedding-3-small', ], ], ], @@ -1618,9 +1597,9 @@ public function testVectorizerWithLoggerInjection() $this->assertInstanceOf(Reference::class, $arguments[0]); $this->assertSame('my_platform_service_id', (string) $arguments[0]); - // Second argument should be model reference - $this->assertInstanceOf(Reference::class, $arguments[1]); - $this->assertSame('ai.vectorizer.my_vectorizer.model', (string) $arguments[1]); + // Second argument should be model string + $this->assertIsString($arguments[1]); + $this->assertSame('text-embedding-3-small', $arguments[1]); // Third argument should be logger reference with IGNORE_ON_INVALID_REFERENCE $this->assertInstanceOf(Reference::class, $arguments[2]); @@ -1640,10 +1619,7 @@ public function testIndexerWithConfiguredVectorizer() 'vectorizer' => [ 'my_vectorizer' => [ 'platform' => 'my_platform_service_id', - 'model' => [ - 'class' => Embeddings::class, - 'name' => 'text-embedding-3-small', - ], + 'model' => 'text-embedding-3-small', ], ], 'indexer' => [ @@ -2187,7 +2163,6 @@ private function getFullConfig(): array 'my_chat_agent' => [ 'platform' => 'openai_platform_service_id', 'model' => [ - 'class' => Gpt::class, 'name' => 'gpt-3.5-turbo', 'options' => [ 'temperature' => 0.7, @@ -2211,7 +2186,7 @@ private function getFullConfig(): array 'fault_tolerant_toolbox' => false, ], 'another_agent' => [ - 'model' => ['class' => Claude::class, 'name' => 'claude-3-opus-20240229'], + 'model' => 'claude-3-opus-20240229', 'prompt' => 'Be concise.', ], ], @@ -2365,7 +2340,6 @@ private function getFullConfig(): array 'test_vectorizer' => [ 'platform' => 'mistral_platform_service_id', 'model' => [ - 'class' => MistralEmbeddings::class, 'name' => 'mistral-embed', 'options' => ['dimension' => 768], ], diff --git a/src/ai-bundle/tests/Profiler/DataCollectorTest.php b/src/ai-bundle/tests/Profiler/DataCollectorTest.php index 9d7520100..bed7ded34 100644 --- a/src/ai-bundle/tests/Profiler/DataCollectorTest.php +++ b/src/ai-bundle/tests/Profiler/DataCollectorTest.php @@ -18,7 +18,6 @@ use Symfony\AI\Platform\Message\Content\Text; use Symfony\AI\Platform\Message\Message; use Symfony\AI\Platform\Message\MessageBag; -use Symfony\AI\Platform\Model; use Symfony\AI\Platform\PlatformInterface; use Symfony\AI\Platform\Result\RawResultInterface; use Symfony\AI\Platform\Result\ResultPromise; @@ -36,7 +35,7 @@ public function testCollectsDataForNonStreamingResponse() $platform->method('invoke')->willReturn(new ResultPromise(static fn () => $result, 
$this->createStub(RawResultInterface::class))); - $result = $traceablePlatform->invoke($this->createStub(Model::class), $messageBag, ['stream' => false]); + $result = $traceablePlatform->invoke('gpt-4o', $messageBag, ['stream' => false]); $this->assertSame('Assistant response', $result->asText()); $dataCollector = new DataCollector([$traceablePlatform], $this->createStub(ToolboxInterface::class), []); @@ -60,7 +59,7 @@ public function testCollectsDataForStreamingResponse() $platform->method('invoke')->willReturn(new ResultPromise(static fn () => $result, $this->createStub(RawResultInterface::class))); - $result = $traceablePlatform->invoke($this->createStub(Model::class), $messageBag, ['stream' => true]); + $result = $traceablePlatform->invoke('gpt-4o', $messageBag, ['stream' => true]); $this->assertSame('Assistant response', implode('', iterator_to_array($result->asStream()))); $dataCollector = new DataCollector([$traceablePlatform], $this->createStub(ToolboxInterface::class), []); diff --git a/src/platform/src/Bridge/AiMlApi/Completions.php b/src/platform/src/Bridge/AiMlApi/Completions.php index f43aab144..c5cbe694c 100644 --- a/src/platform/src/Bridge/AiMlApi/Completions.php +++ b/src/platform/src/Bridge/AiMlApi/Completions.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\AiMlApi; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,124 +18,4 @@ */ class Completions extends Model { - public const GPT_3_5_TURBO = 'gpt-3.5-turbo'; - public const GPT_3_5_TURBO_0125 = 'gpt-3.5-turbo-0125'; - public const GPT_3_5_TURBO_1106 = 'gpt-3.5-turbo-1106'; - public const GPT_4O = 'gpt-4o'; - public const GPT_4O_2024_08_06 = 'gpt-4o-2024-08-06'; - public const GPT_4O_2024_05_13 = 'gpt-4o-2024-05-13'; - public const GPT_4O_MINI = 'gpt-4o-mini'; - public const GPT_4O_MINI_2024_07_18 = 'gpt-4o-mini-2024-07-18'; - public const CHATGPT_4O_LATEST = 'chatgpt-4o-latest'; - public const GPT_4O_AUDIO_PREVIEW = 'gpt-4o-audio-preview'; - public const GPT_4O_MINI_AUDIO_PREVIEW = 'gpt-4o-mini-audio-preview'; - public const GPT_4O_SEARCH_PREVIEW = 'gpt-4o-search-preview'; - public const GPT_4O_MINI_SEARCH_PREVIEW = 'gpt-4o-mini-search-preview'; - public const GPT_4_TURBO = 'gpt-4-turbo'; - public const GPT_4_TURBO_2024_04_09 = 'gpt-4-turbo-2024-04-09'; - public const GPT_4 = 'gpt-4'; - public const GPT_4_0125_PREVIEW = 'gpt-4-0125-preview'; - public const GPT_4_1106_PREVIEW = 'gpt-4-1106-preview'; - public const O1_MINI = 'o1-mini'; - public const O1_MINI_2024_09_12 = 'o1-mini-2024-09-12'; - public const O1 = 'o1'; - public const OPENAI_O3_2025_04_16 = 'openai/o3-2025-04-16'; - public const O3_MINI = 'o3-mini'; - public const OPENAI_O3_PRO = 'openai/o3-pro'; - public const OPENAI_GPT_4_1_2025_04_14 = 'openai/gpt-4.1-2025-04-14'; - public const OPENAI_GPT_4_1_MINI_2025_04_14 = 'openai/gpt-4.1-mini-2025-04-14'; - public const OPENAI_GPT_4_1_NANO_2025_04_14 = 'openai/gpt-4.1-nano-2025-04-14'; - public const OPENAI_O4_MINI_2025_04_16 = 'openai/o4-mini-2025-04-16'; - public const OPENAI_GPT_OSS_20B = 'openai/gpt-oss-20b'; - public const OPENAI_GPT_OSS_120B = 'openai/gpt-oss-120b'; - public const OPENAI_GPT_5_2025_08_07 = 'openai/gpt-5-2025-08-07'; - public const OPENAI_GPT_5_MINI_2025_08_07 = 'openai/gpt-5-mini-2025-08-07'; - public const OPENAI_GPT_5_NANO_2025_08_07 = 'openai/gpt-5-nano-2025-08-07'; - public const OPENAI_GPT_5_CHAT_LATEST = 'openai/gpt-5-chat-latest'; - public const DEEPSEEK_CHAT = 'deepseek-chat'; - public const DEEPSEEK_DEEPSEEK_CHAT = 'deepseek/deepseek-chat'; - 
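With the Model stub removed, the profiler test drives the platform with a plain model name. Outside the test, the same call shape would look roughly like this; wiring of the concrete platform is omitted, and only methods already exercised in the test are assumed:

    <?php

    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;
    use Symfony\AI\Platform\PlatformInterface;

    // $platform is any PlatformInterface implementation; how it is built is out of scope here.
    function askOnce(PlatformInterface $platform): string
    {
        $messages = new MessageBag(Message::ofUser('Hello!'));

        // The model is addressed by its catalog name instead of a Model instance.
        return $platform->invoke('gpt-4o', $messages, ['stream' => false])->asText();
    }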
public const DEEPSEEK_DEEPSEEK_CHAT_V3_0324 = 'deepseek/deepseek-chat-v3-0324'; - public const DEEPSEEK_DEEPSEEK_R1 = 'deepseek/deepseek-r1'; - public const DEEPSEEK_REASONER = 'deepseek-reasoner'; - public const DEEPSEEK_DEEPSEEK_PROVER_V2 = 'deepseek/deepseek-prover-v2'; - public const DEEPSEEK_DEEPSEEK_CHAT_V3_1 = 'deepseek/deepseek-chat-v3.1'; - public const DEEPSEEK_DEEPSEEK_REASONER_V3_1 = 'deepseek/deepseek-reasoner-v3.1'; - public const QWEN_QWEN2_72B_INSTRUCT = 'Qwen/Qwen2-72B-Instruct'; - public const MISTRALAI_MIXTRAL_8X7B_INSTRUCT_V0_1 = 'mistralai/Mixtral-8x7B-Instruct-v0.1'; - public const META_LLAMA_LLAMA_3_3_70B_INSTRUCT_TURBO = 'meta-llama/Llama-3.3-70B-Instruct-Turbo'; - public const META_LLAMA_LLAMA_3_2_3B_INSTRUCT_TURBO = 'meta-llama/Llama-3.2-3B-Instruct-Turbo'; - public const QWEN_QWEN2_5_7B_INSTRUCT_TURBO = 'Qwen/Qwen2.5-7B-Instruct-Turbo'; - public const QWEN_QWEN2_5_CODER_32B_INSTRUCT = 'Qwen/Qwen2.5-Coder-32B-Instruct'; - public const META_LLAMA_META_LLAMA_3_8B_INSTRUCT_LITE = 'meta-llama/Meta-Llama-3-8B-Instruct-Lite'; - public const META_LLAMA_LLAMA_3_70B_CHAT_HF = 'meta-llama/Llama-3-70b-chat-hf'; - public const META_LLAMA_META_LLAMA_3_1_405B_INSTRUCT_TURBO = 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo'; - public const META_LLAMA_META_LLAMA_3_1_8B_INSTRUCT_TURBO = 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'; - public const META_LLAMA_META_LLAMA_3_1_70B_INSTRUCT_TURBO = 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo'; - public const META_LLAMA_LLAMA_4_SCOUT = 'meta-llama/llama-4-scout'; - public const META_LLAMA_LLAMA_4_MAVERICK = 'meta-llama/llama-4-maverick'; - public const MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = 'mistralai/Mistral-7B-Instruct-v0.2'; - public const MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = 'mistralai/Mistral-7B-Instruct-v0.1'; - public const MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = 'mistralai/Mistral-7B-Instruct-v0.3'; - public const CLAUDE_3_OPUS_20240229 = 'claude-3-opus-20240229'; - public const CLAUDE_3_HAIKU_20240307 = 'claude-3-haiku-20240307'; - public const CLAUDE_3_5_SONNET_20240620 = 'claude-3-5-sonnet-20240620'; - public const CLAUDE_3_5_SONNET_20241022 = 'claude-3-5-sonnet-20241022'; - public const CLAUDE_3_5_HAIKU_20241022 = 'claude-3-5-haiku-20241022'; - public const CLAUDE_3_7_SONNET_20250219 = 'claude-3-7-sonnet-20250219'; - public const ANTHROPIC_CLAUDE_OPUS_4 = 'anthropic/claude-opus-4'; - public const ANTHROPIC_CLAUDE_SONNET_4 = 'anthropic/claude-sonnet-4'; - public const ANTHROPIC_CLAUDE_OPUS_4_1 = 'anthropic/claude-opus-4.1'; - public const CLAUDE_OPUS_4_1 = 'claude-opus-4-1'; - public const CLAUDE_OPUS_4_1_20250805 = 'claude-opus-4-1-20250805'; - public const GEMINI_2_0_FLASH_EXP = 'gemini-2.0-flash-exp'; - public const GEMINI_2_0_FLASH = 'gemini-2.0-flash'; - public const GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW = 'google/gemini-2.5-flash-lite-preview'; - public const GOOGLE_GEMINI_2_5_FLASH = 'google/gemini-2.5-flash'; - public const GOOGLE_GEMINI_2_5_PRO = 'google/gemini-2.5-pro'; - public const GOOGLE_GEMMA_2_27B_IT = 'google/gemma-2-27b-it'; - public const GOOGLE_GEMMA_3_4B_IT = 'google/gemma-3-4b-it'; - public const GOOGLE_GEMMA_3_12B_IT = 'google/gemma-3-12b-it'; - public const GOOGLE_GEMMA_3_27B_IT = 'google/gemma-3-27b-it'; - public const GOOGLE_GEMMA_3N_E4B_IT = 'google/gemma-3n-e4b-it'; - public const QWEN_MAX = 'qwen-max'; - public const QWEN_PLUS = 'qwen-plus'; - public const QWEN_TURBO = 'qwen-turbo'; - public const QWEN_MAX_2025_01_25 = 'qwen-max-2025-01-25'; - public const QWEN_QWEN2_5_72B_INSTRUCT_TURBO = 
'Qwen/Qwen2.5-72B-Instruct-Turbo'; - public const QWEN_QWQ_32B = 'Qwen/QwQ-32B'; - public const QWEN_QWEN3_235B_A22B_FP8_TPUT = 'Qwen/Qwen3-235B-A22B-fp8-tput'; - public const ALIBABA_QWEN3_32B = 'alibaba/qwen3-32b'; - public const ALIBABA_QWEN3_CODER_480B_A35B_INSTRUCT = 'alibaba/qwen3-coder-480b-a35b-instruct'; - public const ALIBABA_QWEN3_235B_A22B_THINKING_2507 = 'alibaba/qwen3-235b-a22b-thinking-2507'; - public const MISTRALAI_MISTRAL_TINY = 'mistralai/mistral-tiny'; - public const X_AI_GROK_3_BETA = 'x-ai/grok-3-beta'; - public const X_AI_GROK_3_MINI_BETA = 'x-ai/grok-3-mini-beta'; - public const X_AI_GROK_4_07_09 = 'x-ai/grok-4-07-09'; - public const MISTRALAI_MISTRAL_NEMO = 'mistralai/mistral-nemo'; - public const ANTHRACITE_ORG_MAGNUM_V4_72B = 'anthracite-org/magnum-v4-72b'; - public const NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = 'nvidia/llama-3.1-nemotron-70b-instruct'; - public const COHERE_COMMAND_R_PLUS = 'cohere/command-r-plus'; - public const COHERE_COMMAND_A = 'cohere/command-a'; - public const MISTRALAI_CODESTRAL_2501 = 'mistralai/codestral-2501'; - public const MINIMAX_TEXT_01 = 'MiniMax-Text-01'; - public const MINIMAX_M1 = 'minimax/m1'; - public const MOONSHOT_KIMI_K2_PREVIEW = 'moonshot/kimi-k2-preview'; - public const PERPLEXITY_SONAR = 'perplexity/sonar'; - public const PERPLEXITY_SONAR_PRO = 'perplexity/sonar-pro'; - public const ZHIPU_GLM_4_5_AIR = 'zhipu/glm-4.5-air'; - public const ZHIPU_GLM_4_5 = 'zhipu/glm-4.5'; - - public const DEFAULT_CAPABILITIES = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - ]; - - public function __construct( - string $name, - array $options = [], - array $capabilities = self::DEFAULT_CAPABILITIES, - ) { - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/AiMlApi/Embeddings.php b/src/platform/src/Bridge/AiMlApi/Embeddings.php index 03a15763a..c600792e5 100644 --- a/src/platform/src/Bridge/AiMlApi/Embeddings.php +++ b/src/platform/src/Bridge/AiMlApi/Embeddings.php @@ -18,20 +18,4 @@ */ class Embeddings extends Model { - public const TEXT_EMBEDDING_3_SMALL = 'text-embedding-3-small'; - public const TEXT_EMBEDDING_3_LARGE = 'text-embedding-3-large'; - public const TEXT_EMBEDDING_ADA_002 = 'text-embedding-ada-002'; - public const TOGETHERCOMPUTER_M2_BERT_80M_32K_RETRIEVAL = 'togethercomputer/m2-bert-80M-32k-retrieval'; - public const BAAI_BGE_BASE_EN_V1_5 = 'BAAI/bge-base-en-v1.5'; - public const BAAI_BGE_LARGE_EN_V1 = 'BAAI/bge-large-en-v1.'; - public const VOYAGE_LARGE_2_INSTRUCT = 'voyage-large-2-instruct'; - public const VOYAGE_FINANCE_2 = 'voyage-finance-2'; - public const VOYAGE_MULTILINGUAL_2 = 'voyage-multilingual-2'; - public const VOYAGE_LAW_2 = 'voyage-law-2'; - public const VOYAGE_CODE_2 = 'voyage-code-2'; - public const VOYAGE_LARGE_2 = 'voyage-large-2'; - public const VOYAGE_2 = 'voyage-2'; - public const TEXTEMBEDDING_GECKO_003 = 'textembedding-gecko@003'; - public const TEXTEMBEDDING_GECKO_MULTILINGUAL_001 = 'textembedding-gecko-multilingual@001'; - public const TEXT_MULTILINGUAL_EMBEDDING_002 = 'text-multilingual-embedding-002'; } diff --git a/src/platform/src/Bridge/AiMlApi/ModelCatalog.php b/src/platform/src/Bridge/AiMlApi/ModelCatalog.php new file mode 100644 index 000000000..f4936fe1e --- /dev/null +++ b/src/platform/src/Bridge/AiMlApi/ModelCatalog.php @@ -0,0 +1,1082 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\AiMlApi; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + // Completion models (GPT variants) + 'gpt-3.5-turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-3.5-turbo-0125' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-3.5-turbo-1106' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4o' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-2024-08-06' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-2024-05-13' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-mini' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-mini-2024-07-18' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4-turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'gpt-4' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4-turbo-2024-04-09' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4-0125-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4-1106-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'chatgpt-4o-latest' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + 
Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-audio-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-mini-audio-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-search-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-mini-search-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'o1-mini' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'o1-mini-2024-09-12' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'o1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'o3-mini' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + // OpenAI future models + 'openai/o3-2025-04-16' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'openai/o3-pro' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'openai/gpt-4.1-2025-04-14' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-4.1-mini-2025-04-14' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-4.1-nano-2025-04-14' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/o4-mini-2025-04-16' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'openai/gpt-oss-20b' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-oss-120b' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + 
Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-5-2025-08-07' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-5-mini-2025-08-07' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-5-nano-2025-08-07' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'openai/gpt-5-chat-latest' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // DeepSeek models + 'deepseek-chat' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'deepseek/deepseek-chat' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'deepseek/deepseek-chat-v3-0324' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'deepseek/deepseek-r1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'deepseek-reasoner' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'deepseek/deepseek-prover-v2' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'deepseek/deepseek-chat-v3.1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'deepseek/deepseek-reasoner-v3.1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + // Qwen models + 'Qwen/Qwen2-72B-Instruct' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'qwen-max' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'qwen-plus' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'qwen-turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'qwen-max-2025-01-25' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'Qwen/Qwen2.5-72B-Instruct-Turbo' => [ + 'class' => 
Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'Qwen/QwQ-32B' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'Qwen/Qwen3-235B-A22B-fp8-tput' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'alibaba/qwen3-32b' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'alibaba/qwen3-coder-480b-a35b-instruct' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'alibaba/qwen3-235b-a22b-thinking-2507' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'Qwen/Qwen2.5-7B-Instruct-Turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'Qwen/Qwen2.5-Coder-32B-Instruct' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // Mistral models + 'mistralai/Mixtral-8x7B-Instruct-v0.1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'mistralai/Mistral-7B-Instruct-v0.2' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'mistralai/Mistral-7B-Instruct-v0.1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'mistralai/Mistral-7B-Instruct-v0.3' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'mistralai/mistral-tiny' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'mistralai/mistral-nemo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'mistralai/codestral-2501' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // Meta Llama models + 'meta-llama/Llama-3.3-70B-Instruct-Turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/Llama-3.2-3B-Instruct-Turbo' => [ + 'class' => Completions::class, + 
'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/Meta-Llama-3-8B-Instruct-Lite' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/Llama-3-70b-chat-hf' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/llama-4-scout' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'meta-llama/llama-4-maverick' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // Claude models + 'claude-3-opus-20240229' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-3-haiku-20240307' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-3-5-sonnet-20240620' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-3-5-sonnet-20241022' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-3-5-haiku-20241022' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-3-7-sonnet-20250219' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'anthropic/claude-opus-4' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'anthropic/claude-sonnet-4' => [ + 'class' => Completions::class, + 'capabilities' => 
[ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'anthropic/claude-opus-4.1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-opus-4-1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-opus-4-1-20250805' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + // Gemini models + 'gemini-2.0-flash-exp' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'gemini-2.0-flash' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'google/gemini-2.5-flash-lite-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'google/gemini-2.5-flash' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'google/gemini-2.5-pro' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'google/gemma-2-27b-it' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'google/gemma-3-4b-it' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'google/gemma-3-12b-it' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'google/gemma-3-27b-it' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'google/gemma-3n-e4b-it' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // X.AI models + 'x-ai/grok-3-beta' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'x-ai/grok-3-mini-beta' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + 
Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'x-ai/grok-4-07-09' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // Other models + 'anthracite-org/magnum-v4-72b' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'nvidia/llama-3.1-nemotron-70b-instruct' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'cohere/command-r-plus' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'cohere/command-a' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'MiniMax-Text-01' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'minimax/m1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'moonshot/kimi-k2-preview' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'perplexity/sonar' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'perplexity/sonar-pro' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'zhipu/glm-4.5-air' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'zhipu/glm-4.5' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // Embedding models + 'text-embedding-3-small' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'text-embedding-3-large' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'text-embedding-ada-002' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'togethercomputer/m2-bert-80M-32k-retrieval' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'BAAI/bge-base-en-v1.5' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'BAAI/bge-large-en-v1.' 
=> [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-large-2-instruct' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-finance-2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-multilingual-2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-law-2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-code-2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-large-2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'textembedding-gecko@003' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'textembedding-gecko-multilingual@001' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'text-multilingual-embedding-002' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/AiMlApi/PlatformFactory.php b/src/platform/src/Bridge/AiMlApi/PlatformFactory.php index 368dd6ea9..88b6e42a4 100644 --- a/src/platform/src/Bridge/AiMlApi/PlatformFactory.php +++ b/src/platform/src/Bridge/AiMlApi/PlatformFactory.php @@ -35,6 +35,9 @@ public static function create( [ new Embeddings\ResultConverter(), new Completions\ResultConverter(), - ], $contract); + ], + new ModelCatalog(), + $contract, + ); } } diff --git a/src/platform/src/Bridge/Albert/ModelCatalog.php b/src/platform/src/Bridge/Albert/ModelCatalog.php new file mode 100644 index 000000000..e46dedbd7 --- /dev/null +++ b/src/platform/src/Bridge/Albert/ModelCatalog.php @@ -0,0 +1,44 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
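The AiMlApi catalog's constructor merges caller-provided definitions over the defaults shown above. A minimal sketch, using a made-up model name to illustrate the expected entry shape:

    <?php

    use Symfony\AI\Platform\Bridge\AiMlApi\Completions;
    use Symfony\AI\Platform\Bridge\AiMlApi\ModelCatalog;
    use Symfony\AI\Platform\Capability;

    // 'my-org/custom-model' is purely illustrative; the catalog keeps its default
    // models and merges this extra entry on top via array_merge() in the constructor.
    $catalog = new ModelCatalog([
        'my-org/custom-model' => [
            'class' => Completions::class,
            'capabilities' => [
                Capability::INPUT_MESSAGES,
                Capability::OUTPUT_TEXT,
                Capability::OUTPUT_STREAMING,
            ],
        ],
    ]);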
+ */ + +namespace Symfony\AI\Platform\Bridge\Albert; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array, capabilities: list}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'gpt-4o' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Albert/PlatformFactory.php b/src/platform/src/Bridge/Albert/PlatformFactory.php index ed4d3b7b1..4ab0071f3 100644 --- a/src/platform/src/Bridge/Albert/PlatformFactory.php +++ b/src/platform/src/Bridge/Albert/PlatformFactory.php @@ -50,6 +50,7 @@ public static function create( new EmbeddingsModelClient($httpClient, $apiKey, $baseUrl), ], [new Gpt\ResultConverter(), new Embeddings\ResultConverter()], + new ModelCatalog(), Contract::create(), ); } diff --git a/src/platform/src/Bridge/Anthropic/Claude.php b/src/platform/src/Bridge/Anthropic/Claude.php index bec67c1f5..544413929 100644 --- a/src/platform/src/Bridge/Anthropic/Claude.php +++ b/src/platform/src/Bridge/Anthropic/Claude.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Anthropic; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -34,16 +33,8 @@ class Claude extends Model /** * @param array $options The default options for the model usage */ - public function __construct(string $name, array $options = []) + public function __construct(string $name, array $capabilities = [], array $options = []) { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::INPUT_IMAGE, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - Capability::TOOL_CALLING, - ]; - if (!isset($options['max_tokens'])) { $options['max_tokens'] = 1000; } diff --git a/src/platform/src/Bridge/Anthropic/ModelCatalog.php b/src/platform/src/Bridge/Anthropic/ModelCatalog.php new file mode 100644 index 000000000..7bb12a136 --- /dev/null +++ b/src/platform/src/Bridge/Anthropic/ModelCatalog.php @@ -0,0 +1,152 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
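Claude's constructor above now receives its capabilities from the caller (typically the catalog) instead of hard-coding them, while still defaulting max_tokens to 1000 when the option is omitted. Constructed directly, that looks roughly like:

    <?php

    use Symfony\AI\Platform\Bridge\Anthropic\Claude;
    use Symfony\AI\Platform\Capability;

    // Capabilities are supplied explicitly; 'max_tokens' falls back to 1000 when omitted.
    $claude = new Claude('claude-3-5-sonnet-latest', [
        Capability::INPUT_MESSAGES,
        Capability::INPUT_IMAGE,
        Capability::OUTPUT_TEXT,
        Capability::OUTPUT_STREAMING,
        Capability::TOOL_CALLING,
    ]);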
+ */ + +namespace Symfony\AI\Platform\Bridge\Anthropic; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'claude-3-haiku-20240307' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-5-haiku-latest' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-sonnet-20240229' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-5-sonnet-latest' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-5-sonnet-20241022' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-7-sonnet-latest' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-sonnet-4-20250514' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-sonnet-4-0' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-opus-20240229' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-opus-4-20250514' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-opus-4-0' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-opus-4-1' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Anthropic/PlatformFactory.php b/src/platform/src/Bridge/Anthropic/PlatformFactory.php index cab6974b8..cea9c2c2e 100644 --- a/src/platform/src/Bridge/Anthropic/PlatformFactory.php +++ 
b/src/platform/src/Bridge/Anthropic/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\Anthropic\Contract\AnthropicContract; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -25,6 +26,7 @@ public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -32,6 +34,7 @@ public static function create( return new Platform( [new ModelClient($httpClient, $apiKey)], [new ResultConverter()], + $modelCatalog, $contract ?? AnthropicContract::create(), ); } diff --git a/src/platform/src/Bridge/Azure/Meta/ModelCatalog.php b/src/platform/src/Bridge/Azure/Meta/ModelCatalog.php new file mode 100644 index 000000000..13754d35c --- /dev/null +++ b/src/platform/src/Bridge/Azure/Meta/ModelCatalog.php @@ -0,0 +1,140 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\Azure\Meta; + +use Symfony\AI\Platform\Bridge\Meta\Llama; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'llama-3.3-70B-Instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-90b-vision-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'llama-3.2-11b-vision-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::INPUT_IMAGE, + ], + ], + 'llama-3.2-3b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-3b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-1b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-1b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-405b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-70b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-70b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-8b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-8b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 
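The new `$modelCatalog` parameter lets a customized catalog be injected into the Anthropic factory without touching the model client or result converter. A sketch using named arguments; the environment variable name is an assumption:

use Symfony\AI\Platform\Bridge\Anthropic\ModelCatalog;
use Symfony\AI\Platform\Bridge\Anthropic\PlatformFactory;

$platform = PlatformFactory::create(
    apiKey: $_SERVER['ANTHROPIC_API_KEY'],                      // example env var
    modelCatalog: new ModelCatalog(/* add or override entries */),
);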
'llama-3-70b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-8b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-8b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Azure/Meta/PlatformFactory.php b/src/platform/src/Bridge/Azure/Meta/PlatformFactory.php index d17e133d1..067d282cb 100644 --- a/src/platform/src/Bridge/Azure/Meta/PlatformFactory.php +++ b/src/platform/src/Bridge/Azure/Meta/PlatformFactory.php @@ -12,6 +12,7 @@ namespace Symfony\AI\Platform\Bridge\Azure\Meta; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\HttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -25,10 +26,11 @@ public static function create( string $baseUrl, #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $modelClient = new LlamaModelClient($httpClient ?? HttpClient::create(), $baseUrl, $apiKey); - return new Platform([$modelClient], [new LlamaResultConverter()], $contract); + return new Platform([$modelClient], [new LlamaResultConverter()], $modelCatalog, $contract); } } diff --git a/src/platform/src/Bridge/Azure/OpenAi/ModelCatalog.php b/src/platform/src/Bridge/Azure/OpenAi/ModelCatalog.php new file mode 100644 index 000000000..7fe5d20c3 --- /dev/null +++ b/src/platform/src/Bridge/Azure/OpenAi/ModelCatalog.php @@ -0,0 +1,125 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\Azure\OpenAi; + +use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Bridge\OpenAi\Whisper; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + // GPT models + 'gpt-4o' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4o-mini' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4-turbo' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-35-turbo' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + // Whisper models + 'whisper' => [ + 'class' => Whisper::class, + 'capabilities' => [ + Capability::INPUT_AUDIO, + Capability::OUTPUT_TEXT, + Capability::SPEECH_TO_TEXT, + ], + ], + 'whisper-1' => [ + 'class' => Whisper::class, + 'capabilities' => [ + Capability::INPUT_AUDIO, + Capability::OUTPUT_TEXT, + Capability::SPEECH_TO_TEXT, + ], + ], + // Embedding models + 'text-embedding-ada-002' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'text-embedding-3-small' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'text-embedding-3-large' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Azure/OpenAi/PlatformFactory.php b/src/platform/src/Bridge/Azure/OpenAi/PlatformFactory.php index 6f87e0d6f..819f1cd7c 100644 --- a/src/platform/src/Bridge/Azure/OpenAi/PlatformFactory.php +++ b/src/platform/src/Bridge/Azure/OpenAi/PlatformFactory.php @@ -16,6 +16,7 @@ use Symfony\AI\Platform\Bridge\OpenAi\Whisper; use Symfony\AI\Platform\Bridge\OpenAi\Whisper\AudioNormalizer; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -31,6 +32,7 @@ public static function create( string $apiVersion, #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? 
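The Azure OpenAI catalog reuses the OpenAI bridge classes (Gpt, Whisper, Embeddings), so an entry only has to pick the class and the capability list for the model name Azure exposes (note the Azure-style `gpt-35-turbo`). A sketch of adding one more embeddings entry; the name is a placeholder, not a real Azure model id:

use Symfony\AI\Platform\Bridge\Azure\OpenAi\ModelCatalog;
use Symfony\AI\Platform\Bridge\OpenAi\Embeddings;
use Symfony\AI\Platform\Capability;

$catalog = new ModelCatalog([
    'my-embedding-model' => [   // placeholder name
        'class' => Embeddings::class,
        'capabilities' => [
            Capability::INPUT_TEXT,
            Capability::OUTPUT_STRUCTURED,
        ],
    ],
]);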
$httpClient : new EventSourceHttpClient($httpClient); @@ -41,6 +43,7 @@ public static function create( return new Platform( [$gptModelClient, $embeddingsModelClient, $whisperModelClient], [new Gpt\ResultConverter(), new Embeddings\ResultConverter(), new Whisper\ResultConverter()], + $modelCatalog, $contract ?? Contract::create(new AudioNormalizer()), ); } diff --git a/src/platform/src/Bridge/Bedrock/ModelCatalog.php b/src/platform/src/Bridge/Bedrock/ModelCatalog.php new file mode 100644 index 000000000..754ce2f1b --- /dev/null +++ b/src/platform/src/Bridge/Bedrock/ModelCatalog.php @@ -0,0 +1,295 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\Bedrock; + +use Symfony\AI\Platform\Bridge\Anthropic\Claude; +use Symfony\AI\Platform\Bridge\Bedrock\Nova\Nova; +use Symfony\AI\Platform\Bridge\Meta\Llama; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'nova-micro' => [ + 'class' => Nova::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'nova-lite' => [ + 'class' => Nova::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'nova-pro' => [ + 'class' => Nova::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'nova-premier' => [ + 'class' => Nova::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'claude-3-7-sonnet-20250219' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-haiku-20240307' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-5-haiku-latest' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-sonnet-20240229' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-5-sonnet-latest' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-7-sonnet-latest' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-sonnet-4-20250514' => [ + 'class' => Claude::class, + 'capabilities' => [ + 
Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-sonnet-4-0' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-3-opus-20240229' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-opus-4-20250514' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-opus-4-0' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'claude-opus-4-1' => [ + 'class' => Claude::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'llama-3.3-70B-Instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-90b-vision-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-11b-vision-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-3b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-3b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-1b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-1b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-405b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-70b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-70b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-8b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-8b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-70b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-8b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-8b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } 
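Because the Bedrock catalog aggregates the Nova, Claude, and Llama defaults, the factory needs only one catalog instance for all three model families. A sketch of the wiring, assuming the AsyncAws `BedrockRuntimeClient` is the runtime client referenced by the factory:

use AsyncAws\BedrockRuntime\BedrockRuntimeClient;
use Symfony\AI\Platform\Bridge\Bedrock\ModelCatalog;
use Symfony\AI\Platform\Bridge\Bedrock\PlatformFactory;

$platform = PlatformFactory::create(
    new BedrockRuntimeClient(),   // same default as in the factory signature
    new ModelCatalog(),           // defaults already cover the nova-*, claude-* and llama-* ids
);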
+} diff --git a/src/platform/src/Bridge/Bedrock/Nova/Nova.php b/src/platform/src/Bridge/Bedrock/Nova/Nova.php index 697bcf83c..e1c7d8b49 100644 --- a/src/platform/src/Bridge/Bedrock/Nova/Nova.php +++ b/src/platform/src/Bridge/Bedrock/Nova/Nova.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Bedrock\Nova; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,26 +18,4 @@ */ final class Nova extends Model { - public const MICRO = 'nova-micro'; - public const LITE = 'nova-lite'; - public const PRO = 'nova-pro'; - public const PREMIER = 'nova-premier'; - - /** - * @param array $options The default options for the model usage - */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::TOOL_CALLING, - ]; - - if (self::MICRO !== $name) { - $capabilities[] = Capability::INPUT_IMAGE; - } - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Bedrock/PlatformFactory.php b/src/platform/src/Bridge/Bedrock/PlatformFactory.php index 3df891410..3427aaa02 100644 --- a/src/platform/src/Bridge/Bedrock/PlatformFactory.php +++ b/src/platform/src/Bridge/Bedrock/PlatformFactory.php @@ -23,6 +23,7 @@ use Symfony\AI\Platform\Bridge\Meta\Contract as LlamaContract; use Symfony\AI\Platform\Contract; use Symfony\AI\Platform\Exception\RuntimeException; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; /** @@ -32,6 +33,7 @@ { public static function create( BedrockRuntimeClient $bedrockRuntimeClient = new BedrockRuntimeClient(), + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { if (!class_exists(BedrockRuntimeClient::class)) { @@ -49,6 +51,7 @@ public static function create( new LlamaResultConverter(), new NovaResultConverter(), ], + $modelCatalog, $contract ?? 
Contract::create( new AnthropicContract\AssistantMessageNormalizer(), new AnthropicContract\DocumentNormalizer(), diff --git a/src/platform/src/Bridge/Cerebras/Model.php b/src/platform/src/Bridge/Cerebras/Model.php index adb63a545..0a9a80391 100644 --- a/src/platform/src/Bridge/Cerebras/Model.php +++ b/src/platform/src/Bridge/Cerebras/Model.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Cerebras; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model as BaseModel; /** @@ -19,30 +18,4 @@ */ final class Model extends BaseModel { - public const LLAMA_4_SCOUT_17B_16E_INSTRUCT = 'llama-4-scout-17b-16e-instruct'; - public const LLAMA3_1_8B = 'llama3.1-8b'; - public const LLAMA_3_3_70B = 'llama-3.3-70b'; - public const LLAMA_4_MAVERICK_17B_128E_INSTRUCT = 'llama-4-maverick-17b-128e-instruct'; - public const QWEN_3_32B = 'qwen-3-32b'; - public const QWEN_3_235B_A22B_INSTRUCT_2507 = 'qwen-3-235b-a22b-instruct-2507'; - public const QWEN_3_235B_A22B_THINKING_2507 = 'qwen-3-235b-a22b-thinking-2507'; - public const QWEN_3_CODER_480B = 'qwen-3-coder-480b'; - public const GPT_OSS_120B = 'gpt-oss-120b'; - - public const CAPABILITIES = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - ]; - - /** - * @see https://inference-docs.cerebras.ai/api-reference/chat-completions for details like options - */ - public function __construct( - string $name, - array $capabilities = self::CAPABILITIES, - array $options = [], - ) { - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Cerebras/ModelCatalog.php b/src/platform/src/Bridge/Cerebras/ModelCatalog.php new file mode 100644 index 000000000..3d6028265 --- /dev/null +++ b/src/platform/src/Bridge/Cerebras/ModelCatalog.php @@ -0,0 +1,106 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\Cerebras; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + * + * @see https://inference-docs.cerebras.ai/api-reference/chat-completions for details like options + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'llama-4-scout-17b-16e-instruct' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'llama3.1-8b' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'llama-3.3-70b' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'llama-4-maverick-17b-128e-instruct' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'qwen-3-32b' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'qwen-3-235b-a22b-instruct-2507' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'qwen-3-235b-a22b-thinking-2507' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'qwen-3-coder-480b' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + 'gpt-oss-120b' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Cerebras/PlatformFactory.php b/src/platform/src/Bridge/Cerebras/PlatformFactory.php index 27d277c30..130a78df0 100644 --- a/src/platform/src/Bridge/Cerebras/PlatformFactory.php +++ b/src/platform/src/Bridge/Cerebras/PlatformFactory.php @@ -11,6 +11,7 @@ namespace Symfony\AI\Platform\Bridge\Cerebras; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -23,12 +24,14 @@ public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? 
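With the Cerebras constants and the shared capability constant removed from the Model class, the catalog is now the single source for which models exist and what they can do; the factory accepts it as its third argument. A brief sketch, with the environment variable name assumed:

use Symfony\AI\Platform\Bridge\Cerebras\ModelCatalog;
use Symfony\AI\Platform\Bridge\Cerebras\PlatformFactory;

$platform = PlatformFactory::create(
    apiKey: $_SERVER['CEREBRAS_API_KEY'],   // example env var
    modelCatalog: new ModelCatalog(),       // every default entry lists INPUT_MESSAGES, OUTPUT_TEXT, OUTPUT_STREAMING
);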
$httpClient : new EventSourceHttpClient($httpClient); return new Platform( [new ModelClient($httpClient, $apiKey)], [new ResultConverter()], + $modelCatalog, ); } } diff --git a/src/platform/src/Bridge/DockerModelRunner/Completions.php b/src/platform/src/Bridge/DockerModelRunner/Completions.php index 03423b8cd..3bfc455ee 100644 --- a/src/platform/src/Bridge/DockerModelRunner/Completions.php +++ b/src/platform/src/Bridge/DockerModelRunner/Completions.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\DockerModelRunner; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,25 +18,4 @@ */ class Completions extends Model { - public const GEMMA_3_N = 'ai/gemma3n'; - public const GEMMA_3 = 'ai/gemma3'; - public const QWEN_2_5 = 'ai/qwen2.5'; - public const QWEN_3 = 'ai/qwen3'; - public const QWEN_3_CODER = 'ai/qwen3-coder'; - public const LLAMA_3_1 = 'ai/llama3.1'; - public const LLAMA_3_2 = 'ai/llama3.2'; - public const LLAMA_3_3 = 'ai/llama3.3'; - public const MISTRAL = 'ai/mistral'; - public const MISTRAL_NEMO = 'ai/mistral-nemo'; - public const PHI_4 = 'ai/phi4'; - public const DEEPSEEK_R_1 = 'ai/deepseek-r1-distill-llama'; - public const SEED_OSS = 'ai/seed-oss'; - public const GPT_OSS = 'ai/gpt-oss'; - public const SMOLLM_2 = 'ai/smollm2'; - public const SMOLLM_3 = 'ai/smollm3'; - - public function __construct(string $name, array $options = []) - { - parent::__construct($name, Capability::cases(), $options); - } } diff --git a/src/platform/src/Bridge/DockerModelRunner/Embeddings.php b/src/platform/src/Bridge/DockerModelRunner/Embeddings.php index cfee26a6d..92043e795 100644 --- a/src/platform/src/Bridge/DockerModelRunner/Embeddings.php +++ b/src/platform/src/Bridge/DockerModelRunner/Embeddings.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\DockerModelRunner; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,13 +18,4 @@ */ class Embeddings extends Model { - public const NOMIC_EMBED_TEXT = 'ai/nomic-embed-text-v1.5'; - public const MXBAI_EMBED_LARGE = 'ai/mxbai-embed-large'; - public const EMBEDDING_GEMMA = 'ai/embeddinggemma'; - public const GRANITE_EMBEDDING_MULTI = 'ai/granite-embedding-multilingual'; - - public function __construct(string $name, array $options = []) - { - parent::__construct($name, Capability::cases(), $options); - } } diff --git a/src/platform/src/Bridge/DockerModelRunner/ModelCatalog.php b/src/platform/src/Bridge/DockerModelRunner/ModelCatalog.php new file mode 100644 index 000000000..3d2ae559d --- /dev/null +++ b/src/platform/src/Bridge/DockerModelRunner/ModelCatalog.php @@ -0,0 +1,170 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\DockerModelRunner; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + // Completions models + 'ai/gemma3n' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/gemma3' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/qwen2.5' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/qwen3' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/qwen3-coder' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/llama3.1' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/llama3.2' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/llama3.3' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/mistral' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/mistral-nemo' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/phi4' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/deepseek-r1-distill-llama' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/seed-oss' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/gpt-oss' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/smollm2' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'ai/smollm3' => [ + 'class' => Completions::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + // Embeddings models + 'ai/nomic-embed-text-v1.5' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + ], + ], + 'ai/mxbai-embed-large' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + ], + ], + 'ai/embeddinggemma' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + ], + ], + 'ai/granite-embedding-multilingual' => [ + 'class' => Embeddings::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/DockerModelRunner/PlatformFactory.php b/src/platform/src/Bridge/DockerModelRunner/PlatformFactory.php index b234d8cf4..f95fc12b3 100644 --- a/src/platform/src/Bridge/DockerModelRunner/PlatformFactory.php +++ 
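Previously the Docker Model Runner models claimed `Capability::cases()` wholesale; the catalog now assigns a narrow list per model (messages in, text out for completions; text input only for embeddings). Registering a locally pulled model follows the same array shape; the tag below is an assumption:

use Symfony\AI\Platform\Bridge\DockerModelRunner\Completions;
use Symfony\AI\Platform\Bridge\DockerModelRunner\ModelCatalog;
use Symfony\AI\Platform\Capability;

$catalog = new ModelCatalog([
    'ai/my-local-model' => [   // hypothetical tag pulled into Docker Model Runner
        'class' => Completions::class,
        'capabilities' => [
            Capability::INPUT_MESSAGES,
            Capability::OUTPUT_TEXT,
        ],
    ],
]);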
b/src/platform/src/Bridge/DockerModelRunner/PlatformFactory.php @@ -12,6 +12,7 @@ namespace Symfony\AI\Platform\Bridge\DockerModelRunner; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -24,6 +25,7 @@ class PlatformFactory public static function create( string $hostUrl = 'http://localhost:12434', ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -36,6 +38,9 @@ public static function create( [ new Embeddings\ResultConverter(), new Completions\ResultConverter(), - ], $contract); + ], + $modelCatalog, + $contract + ); } } diff --git a/src/platform/src/Bridge/ElevenLabs/ElevenLabs.php b/src/platform/src/Bridge/ElevenLabs/ElevenLabs.php index 8948ec6c9..3cd93ed61 100644 --- a/src/platform/src/Bridge/ElevenLabs/ElevenLabs.php +++ b/src/platform/src/Bridge/ElevenLabs/ElevenLabs.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\ElevenLabs; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,38 +18,4 @@ */ final class ElevenLabs extends Model { - // text-to-speech models - public const ELEVEN_V3 = 'eleven_v3'; - public const ELEVEN_TTV_V3 = 'eleven_ttv_v3'; - public const ELEVEN_MULTILINGUAL_V2 = 'eleven_multilingual_v2'; - public const ELEVEN_FLASH_V250 = 'eleven_flash_v2_5'; - public const ELEVEN_FLASH_V2 = 'eleven_flashv2'; - public const ELEVEN_TURBO_V2_5 = 'eleven_turbo_v2_5'; - public const ELEVEN_TURBO_v2 = 'eleven_turbo_v2'; - public const ELEVEN_MULTILINGUAL_STS_V2 = 'eleven_multilingual_sts_v2'; - public const ELEVEN_MULTILINGUAL_ttv_V2 = 'eleven_multilingual_ttv_v2'; - public const ELEVEN_ENGLISH_STS_V2 = 'eleven_english_sts_v2'; - - // speech-to-text models - public const SCRIBE_V1 = 'scribe_v1'; - public const SCRIBE_V1_EXPERIMENTAL = 'scribe_v1_experimental'; - - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_TEXT, - Capability::OUTPUT_AUDIO, - Capability::TEXT_TO_SPEECH, - ]; - - if (\in_array($name, [self::SCRIBE_V1, self::SCRIBE_V1_EXPERIMENTAL], true)) { - $capabilities = [ - Capability::INPUT_AUDIO, - Capability::OUTPUT_TEXT, - Capability::SPEECH_TO_TEXT, - ]; - } - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/ElevenLabs/ModelCatalog.php b/src/platform/src/Bridge/ElevenLabs/ModelCatalog.php new file mode 100644 index 000000000..5e3dfe0fa --- /dev/null +++ b/src/platform/src/Bridge/ElevenLabs/ModelCatalog.php @@ -0,0 +1,128 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\ElevenLabs; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'eleven_v3' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_ttv_v3' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_multilingual_v2' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_flash_v2_5' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_flashv2' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_turbo_v2_5' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_turbo_v2' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_multilingual_sts_v2' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_multilingual_ttv_v2' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'eleven_english_sts_v2' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_AUDIO, + Capability::TEXT_TO_SPEECH, + ], + ], + 'scribe_v1' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_AUDIO, + Capability::OUTPUT_TEXT, + Capability::SPEECH_TO_TEXT, + ], + ], + 'scribe_v1_experimental' => [ + 'class' => ElevenLabs::class, + 'capabilities' => [ + Capability::INPUT_AUDIO, + Capability::OUTPUT_TEXT, + Capability::SPEECH_TO_TEXT, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/ElevenLabs/PlatformFactory.php b/src/platform/src/Bridge/ElevenLabs/PlatformFactory.php index 84792fb7d..7cb29acce 100644 --- a/src/platform/src/Bridge/ElevenLabs/PlatformFactory.php +++ b/src/platform/src/Bridge/ElevenLabs/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\ElevenLabs\Contract\ElevenLabsContract; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -26,6 +27,7 @@ public static function create( string $apiKey, string $hostUrl = 'https://api.elevenlabs.io/v1', ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? 
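The ElevenLabs catalog encodes the split the old constructor expressed with an if-statement: the text-to-speech entries get INPUT_TEXT/OUTPUT_AUDIO/TEXT_TO_SPEECH, the scribe entries get INPUT_AUDIO/OUTPUT_TEXT/SPEECH_TO_TEXT. Passing the catalog to the factory is optional since it is the default; a sketch with an assumed API key variable:

use Symfony\AI\Platform\Bridge\ElevenLabs\ModelCatalog;
use Symfony\AI\Platform\Bridge\ElevenLabs\PlatformFactory;

$platform = PlatformFactory::create(
    apiKey: $_SERVER['ELEVEN_LABS_API_KEY'],  // example env var
    modelCatalog: new ModelCatalog(),         // defaults cover both text-to-speech and scribe models
);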
$httpClient : new EventSourceHttpClient($httpClient); @@ -33,6 +35,7 @@ public static function create( return new Platform( [new ElevenLabsClient($httpClient, $apiKey, $hostUrl)], [new ElevenLabsResultConverter($httpClient)], + $modelCatalog, $contract ?? ElevenLabsContract::create(), ); } diff --git a/src/platform/src/Bridge/Gemini/Embeddings.php b/src/platform/src/Bridge/Gemini/Embeddings.php index 3d3e42a00..84ecf9300 100644 --- a/src/platform/src/Bridge/Gemini/Embeddings.php +++ b/src/platform/src/Bridge/Gemini/Embeddings.php @@ -12,7 +12,6 @@ namespace Symfony\AI\Platform\Bridge\Gemini; use Symfony\AI\Platform\Bridge\Gemini\Embeddings\TaskType; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -20,15 +19,11 @@ */ class Embeddings extends Model { - public const GEMINI_EMBEDDING_EXP_03_07 = 'gemini-embedding-exp-03-07'; - public const TEXT_EMBEDDING_004 = 'text-embedding-004'; - public const EMBEDDING_001 = 'embedding-001'; - /** * @param array{dimensions?: int, task_type?: TaskType|string} $options */ - public function __construct(string $name, array $options = []) + public function __construct(string $name, array $capabilities = [], array $options = []) { - parent::__construct($name, [Capability::INPUT_MULTIPLE], $options); + parent::__construct($name, $capabilities, $options); } } diff --git a/src/platform/src/Bridge/Gemini/Gemini.php b/src/platform/src/Bridge/Gemini/Gemini.php index dc8cf2b29..7ed11d20d 100644 --- a/src/platform/src/Bridge/Gemini/Gemini.php +++ b/src/platform/src/Bridge/Gemini/Gemini.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Gemini; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,30 +18,4 @@ */ class Gemini extends Model { - public const GEMINI_25_FLASH = 'gemini-2.5-flash'; - public const GEMINI_25_PRO = 'gemini-2.5-pro'; - public const GEMINI_25_FLASH_LITE = 'gemini-2.5-flash-lite'; - public const GEMINI_2_FLASH = 'gemini-2.0-flash'; - public const GEMINI_2_PRO = 'gemini-2.0-pro-exp-02-05'; - public const GEMINI_2_FLASH_LITE = 'gemini-2.0-flash-lite-preview-02-05'; - public const GEMINI_2_FLASH_THINKING = 'gemini-2.0-flash-thinking-exp-01-21'; - public const GEMINI_1_5_FLASH = 'gemini-1.5-flash'; - - /** - * @param array $options The default options for the model usage - */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::INPUT_IMAGE, - Capability::INPUT_AUDIO, - Capability::INPUT_PDF, - Capability::OUTPUT_STREAMING, - Capability::OUTPUT_STRUCTURED, - Capability::TOOL_CALLING, - ]; - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Gemini/ModelCatalog.php b/src/platform/src/Bridge/Gemini/ModelCatalog.php new file mode 100644 index 000000000..b78fa3320 --- /dev/null +++ b/src/platform/src/Bridge/Gemini/ModelCatalog.php @@ -0,0 +1,140 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\Gemini; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'gemini-2.5-flash' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.5-pro' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.5-flash-lite' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.0-flash' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.0-pro-exp-02-05' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.0-flash-lite-preview-02-05' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.0-flash-thinking-exp-01-21' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-1.5-flash' => [ + 'class' => Gemini::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-embedding-exp-03-07' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'text-embedding-004' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'embedding-001' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Gemini/PlatformFactory.php b/src/platform/src/Bridge/Gemini/PlatformFactory.php index ca0a68952..947928156 100644 --- a/src/platform/src/Bridge/Gemini/PlatformFactory.php +++ b/src/platform/src/Bridge/Gemini/PlatformFactory.php @@ -17,6 +17,7 @@ use Symfony\AI\Platform\Bridge\Gemini\Gemini\ModelClient as GeminiModelClient; use 
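The Gemini Embeddings model keeps its constructor but now receives its capabilities from the catalog instead of hard-coding INPUT_MULTIPLE. Constructing it directly looks like this sketch; the name and options are illustrative:

use Symfony\AI\Platform\Bridge\Gemini\Embeddings;
use Symfony\AI\Platform\Capability;

$embeddings = new Embeddings(
    'text-embedding-004',
    [Capability::INPUT_MULTIPLE],   // previously implied, now passed in (normally by the catalog)
    ['dimensions' => 768],          // options shape is unchanged: dimensions and task_type
);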
Symfony\AI\Platform\Bridge\Gemini\Gemini\ResultConverter as GeminiResultConverter; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -29,6 +30,7 @@ public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -36,6 +38,7 @@ public static function create( return new Platform( [new EmbeddingsModelClient($httpClient, $apiKey), new GeminiModelClient($httpClient, $apiKey)], [new EmbeddingsResultConverter(), new GeminiResultConverter()], + $modelCatalog, $contract ?? GeminiContract::create(), ); } diff --git a/src/platform/src/Bridge/HuggingFace/ModelCatalog.php b/src/platform/src/Bridge/HuggingFace/ModelCatalog.php new file mode 100644 index 000000000..3a53ae244 --- /dev/null +++ b/src/platform/src/Bridge/HuggingFace/ModelCatalog.php @@ -0,0 +1,23 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\HuggingFace; + +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends DynamicModelCatalog +{ + // HuggingFace supports a wide range of models dynamically + // Models are identified by repository/model format (e.g., "microsoft/DialoGPT-medium") +} diff --git a/src/platform/src/Bridge/HuggingFace/PlatformFactory.php b/src/platform/src/Bridge/HuggingFace/PlatformFactory.php index 674f040b8..5bfafab48 100644 --- a/src/platform/src/Bridge/HuggingFace/PlatformFactory.php +++ b/src/platform/src/Bridge/HuggingFace/PlatformFactory.php @@ -34,6 +34,7 @@ public static function create( return new Platform( [new ModelClient($httpClient, $provider, $apiKey)], [new ResultConverter()], + new ModelCatalog(), $contract ?? Contract::create( new FileNormalizer(), new MessageBagNormalizer(), diff --git a/src/platform/src/Bridge/LmStudio/Completions.php b/src/platform/src/Bridge/LmStudio/Completions.php index 5d57c4da8..cda9fdbbd 100644 --- a/src/platform/src/Bridge/LmStudio/Completions.php +++ b/src/platform/src/Bridge/LmStudio/Completions.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\LmStudio; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,17 +18,4 @@ */ class Completions extends Model { - public const DEFAULT_CAPABILITIES = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - ]; - - public function __construct( - string $name, - array $options = [], - array $capabilities = self::DEFAULT_CAPABILITIES, - ) { - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/LmStudio/ModelCatalog.php b/src/platform/src/Bridge/LmStudio/ModelCatalog.php new file mode 100644 index 000000000..9713489aa --- /dev/null +++ b/src/platform/src/Bridge/LmStudio/ModelCatalog.php @@ -0,0 +1,23 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\LmStudio; + +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends DynamicModelCatalog +{ + // LmStudio can use any model that is loaded locally + // Models are dynamically available based on what's loaded in LmStudio +} diff --git a/src/platform/src/Bridge/LmStudio/PlatformFactory.php b/src/platform/src/Bridge/LmStudio/PlatformFactory.php index f3d5c6ee9..afebb7990 100644 --- a/src/platform/src/Bridge/LmStudio/PlatformFactory.php +++ b/src/platform/src/Bridge/LmStudio/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\LmStudio\Embeddings\ModelClient; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -25,6 +26,7 @@ class PlatformFactory public static function create( string $hostUrl = 'http://localhost:1234', ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -37,6 +39,9 @@ public static function create( [ new Embeddings\ResultConverter(), new Completions\ResultConverter(), - ], $contract); + ], + $modelCatalog, + $contract + ); } } diff --git a/src/platform/src/Bridge/Meta/Llama.php b/src/platform/src/Bridge/Meta/Llama.php index 2b5485c62..f579d7eb4 100644 --- a/src/platform/src/Bridge/Meta/Llama.php +++ b/src/platform/src/Bridge/Meta/Llama.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Meta; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,32 +18,4 @@ */ class Llama extends Model { - public const V3_3_70B_INSTRUCT = 'llama-3.3-70B-Instruct'; - public const V3_2_90B_VISION_INSTRUCT = 'llama-3.2-90b-vision-instruct'; - public const V3_2_11B_VISION_INSTRUCT = 'llama-3.2-11b-vision-instruct'; - public const V3_2_3B = 'llama-3.2-3b'; - public const V3_2_3B_INSTRUCT = 'llama-3.2-3b-instruct'; - public const V3_2_1B = 'llama-3.2-1b'; - public const V3_2_1B_INSTRUCT = 'llama-3.2-1b-instruct'; - public const V3_1_405B_INSTRUCT = 'llama-3.1-405b-instruct'; - public const V3_1_70B = 'llama-3.1-70b'; - public const V3_1_70B_INSTRUCT = 'llama-3-70b-instruct'; - public const V3_1_8B = 'llama-3.1-8b'; - public const V3_1_8B_INSTRUCT = 'llama-3.1-8b-instruct'; - public const V3_70B = 'llama-3-70b'; - public const V3_8B_INSTRUCT = 'llama-3-8b-instruct'; - public const V3_8B = 'llama-3-8b'; - - /** - * @param array $options - */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - ]; - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Mistral/Embeddings.php b/src/platform/src/Bridge/Mistral/Embeddings.php index 90fe2ad49..4f06f090e 100644 --- a/src/platform/src/Bridge/Mistral/Embeddings.php +++ b/src/platform/src/Bridge/Mistral/Embeddings.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Mistral; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,13 +18,4 @@ */ final class Embeddings extends Model { - public const MISTRAL_EMBED = 'mistral-embed'; - - /** - * @param array $options - */ - public function __construct(string $name, array $options = []) - { - 
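HuggingFace and LM Studio do not ship a fixed model list, so both bridges extend `DynamicModelCatalog` and accept any model name; the factory wiring stays as simple as before. A sketch for LM Studio using its default local endpoint:

use Symfony\AI\Platform\Bridge\LmStudio\PlatformFactory;

// Any model currently loaded in LM Studio can be addressed; no catalog entries are needed.
$platform = PlatformFactory::create('http://localhost:1234');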
parent::__construct($name, [Capability::INPUT_MULTIPLE], $options); - } } diff --git a/src/platform/src/Bridge/Mistral/Mistral.php b/src/platform/src/Bridge/Mistral/Mistral.php index 7af0f5720..8bf81e25a 100644 --- a/src/platform/src/Bridge/Mistral/Mistral.php +++ b/src/platform/src/Bridge/Mistral/Mistral.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Mistral; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,55 +18,4 @@ */ final class Mistral extends Model { - public const CODESTRAL = 'codestral-latest'; - public const MISTRAL_LARGE = 'mistral-large-latest'; - public const MISTRAL_MEDIUM = 'mistral-medium-latest'; - public const MISTRAL_SMALL = 'mistral-small-latest'; - public const MISTRAL_NEMO = 'open-mistral-nemo'; - public const MISTRAL_SABA = 'mistral-saba-latest'; - public const MINISTRAL_3B = 'ministral-3b-latest'; - public const MINISTRAL_8B = 'ministral-8b-latest'; - public const PIXSTRAL_LARGE = 'pixstral-large-latest'; - public const PIXSTRAL = 'pixstral-12b-latest'; - public const VOXTRAL_SMALL = 'voxtral-small-latest'; - public const VOXTRAL_MINI = 'voxtral-mini-latest'; - - /** - * @param array $options - */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - Capability::OUTPUT_STRUCTURED, - ]; - - if (\in_array($name, [self::PIXSTRAL, self::PIXSTRAL_LARGE, self::MISTRAL_MEDIUM, self::MISTRAL_SMALL], true)) { - $capabilities[] = Capability::INPUT_IMAGE; - } - - if (\in_array($name, [self::VOXTRAL_SMALL, self::VOXTRAL_MINI], true)) { - $capabilities[] = Capability::INPUT_AUDIO; - } - - if (\in_array($name, [ - self::CODESTRAL, - self::MISTRAL_LARGE, - self::MISTRAL_MEDIUM, - self::MISTRAL_SMALL, - self::MISTRAL_NEMO, - self::MINISTRAL_3B, - self::MINISTRAL_8B, - self::PIXSTRAL, - self::PIXSTRAL_LARGE, - self::VOXTRAL_MINI, - self::VOXTRAL_SMALL, - ], true)) { - $capabilities[] = Capability::TOOL_CALLING; - } - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Mistral/ModelCatalog.php b/src/platform/src/Bridge/Mistral/ModelCatalog.php new file mode 100644 index 000000000..cb92973e0 --- /dev/null +++ b/src/platform/src/Bridge/Mistral/ModelCatalog.php @@ -0,0 +1,161 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\Mistral; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'codestral-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'mistral-large-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'mistral-medium-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + Capability::TOOL_CALLING, + ], + ], + 'mistral-small-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + Capability::TOOL_CALLING, + ], + ], + 'open-mistral-nemo' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'mistral-saba-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'ministral-3b-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'ministral-8b-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'pixstral-large-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + Capability::TOOL_CALLING, + ], + ], + 'pixstral-12b-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + Capability::TOOL_CALLING, + ], + ], + 'voxtral-small-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_AUDIO, + Capability::TOOL_CALLING, + ], + ], + 'voxtral-mini-latest' => [ + 'class' => Mistral::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_AUDIO, + Capability::TOOL_CALLING, + ], + ], + 'mistral-embed' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + ]; + + $this->models = 
array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Mistral/PlatformFactory.php b/src/platform/src/Bridge/Mistral/PlatformFactory.php index 048f4100a..325247a29 100644 --- a/src/platform/src/Bridge/Mistral/PlatformFactory.php +++ b/src/platform/src/Bridge/Mistral/PlatformFactory.php @@ -15,6 +15,7 @@ use Symfony\AI\Platform\Bridge\Mistral\Contract\DocumentUrlNormalizer; use Symfony\AI\Platform\Bridge\Mistral\Contract\ToolNormalizer; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -27,6 +28,7 @@ final class PlatformFactory public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -34,6 +36,7 @@ public static function create( return new Platform( [new Embeddings\ModelClient($httpClient, $apiKey), new Llm\ModelClient($httpClient, $apiKey)], [new Embeddings\ResultConverter(), new Llm\ResultConverter()], + $modelCatalog, $contract ?? Contract::create( new ToolNormalizer(), new DocumentNormalizer(), diff --git a/src/platform/src/Bridge/Ollama/ModelCatalog.php b/src/platform/src/Bridge/Ollama/ModelCatalog.php new file mode 100644 index 000000000..41869a3d2 --- /dev/null +++ b/src/platform/src/Bridge/Ollama/ModelCatalog.php @@ -0,0 +1,213 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
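In the Mistral catalog the per-model if-chains of the old constructor become explicit capability lists (image input on the vision-capable entries, audio input on the voxtral entries, INPUT_MULTIPLE for mistral-embed). Injecting the catalog into the factory is a one-liner; the environment variable name is an assumption:

use Symfony\AI\Platform\Bridge\Mistral\ModelCatalog;
use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory;

$platform = PlatformFactory::create(
    apiKey: $_SERVER['MISTRAL_API_KEY'],  // example env var
    modelCatalog: new ModelCatalog(),     // defaults include chat, vision, audio and embedding entries
);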
+ */ + +namespace Symfony\AI\Platform\Bridge\Ollama; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array, capabilities: list}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'deepseek-r1' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'llama3.1' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'llama3.2' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'llama3' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'mistral' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'qwen3' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'qwen' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'qwen2' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'qwen2.5' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'qwen2.5-coder' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemma3n' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gemma3' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'qwen2.5vl' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'llava' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'phi3' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gemma2' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gemma' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'llama2' => [ + 'class' => 
Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'nomic-embed-text' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_MULTIPLE, + ], + ], + 'bge-m3' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_MULTIPLE, + ], + ], + 'all-minilm' => [ + 'class' => Ollama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_MULTIPLE, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Ollama/Ollama.php b/src/platform/src/Bridge/Ollama/Ollama.php index 000755618..460bc514a 100644 --- a/src/platform/src/Bridge/Ollama/Ollama.php +++ b/src/platform/src/Bridge/Ollama/Ollama.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Ollama; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,65 +18,4 @@ */ class Ollama extends Model { - public const DEEPSEEK_R_1 = 'deepseek-r1'; - public const GEMMA_3_N = 'gemma3n'; - public const GEMMA_3 = 'gemma3'; - public const QWEN_3 = 'qwen3'; - public const QWEN_3_32B = 'qwen3:32b'; - public const QWEN_2_5_VL = 'qwen2.5vl'; - public const LLAMA_3_1 = 'llama3.1'; - public const LLAMA_3_2 = 'llama3.2'; - public const MISTRAL = 'mistral'; - public const QWEN_2_5 = 'qwen2.5'; - public const LLAMA_3 = 'llama3'; - public const LLAVA = 'llava'; - public const PHI_3 = 'phi3'; - public const GEMMA_2 = 'gemma2'; - public const QWEN_2_5_CODER = 'qwen2.5-coder'; - public const QWEN_2_5_CODER_32B = 'qwen2.5-coder:32b'; - public const GEMMA = 'gemma'; - public const QWEN = 'qwen'; - public const QWEN_2 = 'qwen2'; - public const LLAMA_2 = 'llama2'; - public const NOMIC_EMBED_TEXT = 'nomic-embed-text'; - public const BGE_M3 = 'bge-m3'; - public const ALL_MINILM = 'all-minilm'; - - private const TOOL_PATTERNS = [ - '/./' => [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STRUCTURED, - ], - '/^llama\D*3(\D*\d+)/' => [ - Capability::TOOL_CALLING, - ], - '/^qwen\d(\.\d)?(-coder)?(:\d+(\.\d+)?b)?$/' => [ - Capability::TOOL_CALLING, - ], - '/^(deepseek|mistral)/' => [ - Capability::TOOL_CALLING, - ], - '/^(nomic|bge|all-minilm).*/' => [ - Capability::INPUT_MULTIPLE, - ], - ]; - - /** - * @param array $options - */ - public function __construct(string $name, array $options = []) - { - $capabilities = []; - - foreach (self::TOOL_PATTERNS as $pattern => $possibleCapabilities) { - if (1 === preg_match($pattern, $name)) { - foreach ($possibleCapabilities as $capability) { - $capabilities[] = $capability; - } - } - } - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Ollama/PlatformFactory.php b/src/platform/src/Bridge/Ollama/PlatformFactory.php index af9b490ba..dd801f39c 100644 --- a/src/platform/src/Bridge/Ollama/PlatformFactory.php +++ b/src/platform/src/Bridge/Ollama/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\Ollama\Contract\OllamaContract; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -25,6 
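Since the regex-based capability detection is removed from the Ollama model class above, models that are not in the default catalog must now be declared explicitly. A sketch, assuming a locally pulled tag:

    use Symfony\AI\Platform\Bridge\Ollama\ModelCatalog;
    use Symfony\AI\Platform\Bridge\Ollama\Ollama;
    use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
    use Symfony\AI\Platform\Capability;

    // 'llama3.1:70b' is an assumed local tag; capabilities are listed by hand.
    $catalog = new ModelCatalog([
        'llama3.1:70b' => [
            'class' => Ollama::class,
            'capabilities' => [
                Capability::INPUT_MESSAGES,
                Capability::OUTPUT_TEXT,
                Capability::TOOL_CALLING,
            ],
        ],
    ]);

    $platform = PlatformFactory::create('http://localhost:11434', modelCatalog: $catalog);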
+26,7 @@ final class PlatformFactory public static function create( string $hostUrl = 'http://localhost:11434', ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -32,7 +34,8 @@ public static function create( return new Platform( [new OllamaClient($httpClient, $hostUrl)], [new OllamaResultConverter()], - $contract ?? OllamaContract::create() + $modelCatalog, + $contract ?? OllamaContract::create(), ); } } diff --git a/src/platform/src/Bridge/OpenAi/DallE.php b/src/platform/src/Bridge/OpenAi/DallE.php index e0cb89b21..50247f39f 100644 --- a/src/platform/src/Bridge/OpenAi/DallE.php +++ b/src/platform/src/Bridge/OpenAi/DallE.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\OpenAi; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,17 +18,4 @@ */ class DallE extends Model { - public const DALL_E_2 = 'dall-e-2'; - public const DALL_E_3 = 'dall-e-3'; - - /** @param array $options The default options for the model usage */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_TEXT, - Capability::OUTPUT_IMAGE, - ]; - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/OpenAi/Embeddings.php b/src/platform/src/Bridge/OpenAi/Embeddings.php index 633b447dd..b7ab0ef6b 100644 --- a/src/platform/src/Bridge/OpenAi/Embeddings.php +++ b/src/platform/src/Bridge/OpenAi/Embeddings.php @@ -18,15 +18,11 @@ */ class Embeddings extends Model { - public const TEXT_ADA_002 = 'text-embedding-ada-002'; - public const TEXT_3_LARGE = 'text-embedding-3-large'; - public const TEXT_3_SMALL = 'text-embedding-3-small'; - /** * @param array $options */ - public function __construct(string $name, array $options = []) + public function __construct(string $name, array $capabilities = [], array $options = []) { - parent::__construct($name, [], $options); + parent::__construct($name, $capabilities, $options); } } diff --git a/src/platform/src/Bridge/OpenAi/Gpt.php b/src/platform/src/Bridge/OpenAi/Gpt.php index 2044db1c5..e9c6e386c 100644 --- a/src/platform/src/Bridge/OpenAi/Gpt.php +++ b/src/platform/src/Bridge/OpenAi/Gpt.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\OpenAi; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -20,83 +19,11 @@ */ class Gpt extends Model { - public const GPT_35_TURBO = 'gpt-3.5-turbo'; - public const GPT_35_TURBO_INSTRUCT = 'gpt-3.5-turbo-instruct'; - public const GPT_4 = 'gpt-4'; - public const GPT_4_TURBO = 'gpt-4-turbo'; - public const GPT_4O = 'gpt-4o'; - public const GPT_4O_MINI = 'gpt-4o-mini'; - public const GPT_4O_AUDIO = 'gpt-4o-audio-preview'; - public const O1_MINI = 'o1-mini'; - public const O1_PREVIEW = 'o1-preview'; - public const O3_MINI = 'o3-mini'; - public const O3_MINI_HIGH = 'o3-mini-high'; - public const GPT_45_PREVIEW = 'gpt-4.5-preview'; - public const GPT_41 = 'gpt-4.1'; - public const GPT_41_MINI = 'gpt-4.1-mini'; - public const GPT_41_NANO = 'gpt-4.1-nano'; - public const GPT_5 = 'gpt-5'; - public const GPT_5_CHAT = 'gpt-5-chat-latest'; - public const GPT_5_MINI = 'gpt-5-mini'; - public const GPT_5_NANO = 'gpt-5-nano'; - - private const IMAGE_SUPPORTING = [ - self::GPT_4_TURBO, - self::GPT_4O, - self::GPT_4O_MINI, - self::O1_MINI, - self::O1_PREVIEW, - self::O3_MINI, - self::GPT_45_PREVIEW, - 
self::GPT_41, - self::GPT_41_MINI, - self::GPT_41_NANO, - self::GPT_5, - self::GPT_5_MINI, - self::GPT_5_NANO, - self::GPT_5_CHAT, - ]; - - private const STRUCTURED_OUTPUT_SUPPORTING = [ - self::GPT_4O, - self::GPT_4O_MINI, - self::O3_MINI, - self::GPT_45_PREVIEW, - self::GPT_41, - self::GPT_41_MINI, - self::GPT_41_NANO, - self::GPT_5, - self::GPT_5_MINI, - self::GPT_5_NANO, - ]; - /** * @param array $options The default options for the model usage */ - public function __construct(string $name, array $options = []) + public function __construct(string $name, array $capabilities = [], array $options = []) { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - ]; - - if (self::GPT_5_CHAT !== $name) { - $capabilities[] = Capability::TOOL_CALLING; - } - - if (self::GPT_4O_AUDIO === $name) { - $capabilities[] = Capability::INPUT_AUDIO; - } - - if (\in_array($name, self::IMAGE_SUPPORTING, true)) { - $capabilities[] = Capability::INPUT_IMAGE; - } - - if (\in_array($name, self::STRUCTURED_OUTPUT_SUPPORTING, true)) { - $capabilities[] = Capability::OUTPUT_STRUCTURED; - } - parent::__construct($name, $capabilities, $options); } } diff --git a/src/platform/src/Bridge/OpenAi/ModelCatalog.php b/src/platform/src/Bridge/OpenAi/ModelCatalog.php new file mode 100644 index 000000000..0fae198cf --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/ModelCatalog.php @@ -0,0 +1,262 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\OpenAi; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'gpt-3.5-turbo' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-3.5-turbo-instruct' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4-turbo' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'gpt-4o' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-mini' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4o-audio-preview' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_AUDIO, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, 
+ ], + ], + 'o1-mini' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'o1-preview' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + ], + ], + 'o3-mini' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'o3-mini-high' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + ], + ], + 'gpt-4.5-preview' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4.1' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4.1-mini' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-4.1-nano' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-5' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-5-chat-latest' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::INPUT_IMAGE, + ], + ], + 'gpt-5-mini' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-5-nano' => [ + 'class' => Gpt::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'text-embedding-ada-002' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_TEXT], + ], + 'text-embedding-3-large' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_TEXT], + ], + 'text-embedding-3-small' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_TEXT], + ], + 'whisper-1' => [ + 'class' => Whisper::class, + 'capabilities' => [ + Capability::INPUT_AUDIO, + Capability::OUTPUT_TEXT, + ], + ], + 'dall-e-2' => [ + 'class' => DallE::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::OUTPUT_IMAGE, + ], + ], + 'dall-e-3' => [ + 'class' => DallE::class, + 'capabilities' => [ + 
Capability::INPUT_TEXT, + Capability::OUTPUT_IMAGE, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/OpenAi/PlatformFactory.php b/src/platform/src/Bridge/OpenAi/PlatformFactory.php index 91b6f7aea..f93743ed5 100644 --- a/src/platform/src/Bridge/OpenAi/PlatformFactory.php +++ b/src/platform/src/Bridge/OpenAi/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\OpenAi\Contract\OpenAiContract; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -28,6 +29,7 @@ public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ?string $region = null, ): Platform { @@ -46,6 +48,7 @@ public static function create( new DallE\ResultConverter(), new Whisper\ResultConverter(), ], + $modelCatalog, $contract ?? OpenAiContract::create(), ); } diff --git a/src/platform/src/Bridge/OpenAi/Whisper.php b/src/platform/src/Bridge/OpenAi/Whisper.php index 882d543b1..c0255c480 100644 --- a/src/platform/src/Bridge/OpenAi/Whisper.php +++ b/src/platform/src/Bridge/OpenAi/Whisper.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\OpenAi; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,18 +18,4 @@ */ class Whisper extends Model { - public const WHISPER_1 = 'whisper-1'; - - /** - * @param array $options - */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_AUDIO, - Capability::OUTPUT_TEXT, - ]; - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/OpenRouter/ModelCatalog.php b/src/platform/src/Bridge/OpenRouter/ModelCatalog.php new file mode 100644 index 000000000..4324272a6 --- /dev/null +++ b/src/platform/src/Bridge/OpenRouter/ModelCatalog.php @@ -0,0 +1,23 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
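With the OpenAI catalog above in place, chat, embedding, and audio models are resolved from plain names instead of manually constructed Gpt, Embeddings, or Whisper instances. A sketch; the message classes are the platform's existing message API, and the key and inputs are examples:

    use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;

    $platform = PlatformFactory::create($_ENV['OPENAI_API_KEY']);

    // Both calls go through the same catalog; no model object is built by the caller.
    $chat = $platform->invoke('gpt-4o-mini', new MessageBag(Message::ofUser('Hello!')));
    $vector = $platform->invoke('text-embedding-3-small', 'Symfony AI');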
+ */ + +namespace Symfony\AI\Platform\Bridge\OpenRouter; + +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends DynamicModelCatalog +{ + // OpenRouter provides access to many different models from various providers + // Models are dynamically available and identified by provider/model format +} diff --git a/src/platform/src/Bridge/OpenRouter/PlatformFactory.php b/src/platform/src/Bridge/OpenRouter/PlatformFactory.php index 00b8b1d02..6825a0468 100644 --- a/src/platform/src/Bridge/OpenRouter/PlatformFactory.php +++ b/src/platform/src/Bridge/OpenRouter/PlatformFactory.php @@ -15,6 +15,7 @@ use Symfony\AI\Platform\Bridge\Gemini\Contract\MessageBagNormalizer; use Symfony\AI\Platform\Bridge\Gemini\Contract\UserMessageNormalizer; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -27,6 +28,7 @@ final class PlatformFactory public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -34,6 +36,7 @@ public static function create( return new Platform( [new ModelClient($httpClient, $apiKey)], [new ResultConverter()], + $modelCatalog, $contract ?? Contract::create( new AssistantMessageNormalizer(), new MessageBagNormalizer(), diff --git a/src/platform/src/Bridge/Perplexity/ModelCatalog.php b/src/platform/src/Bridge/Perplexity/ModelCatalog.php new file mode 100644 index 000000000..a6b6aa01b --- /dev/null +++ b/src/platform/src/Bridge/Perplexity/ModelCatalog.php @@ -0,0 +1,87 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
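Because OpenRouter uses the DynamicModelCatalog shown above, any "provider/model" identifier can be passed to invoke() without predefining it. A sketch; the model id and environment variable are assumptions:

    use Symfony\AI\Platform\Bridge\OpenRouter\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;

    $platform = PlatformFactory::create($_ENV['OPENROUTER_KEY']);

    // Any provider-prefixed model name is accepted by the dynamic catalog.
    $result = $platform->invoke('google/gemini-2.0-flash', new MessageBag(Message::ofUser('Hi!')));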
+ */ + +namespace Symfony\AI\Platform\Bridge\Perplexity; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'sonar' => [ + 'class' => Perplexity::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + ], + ], + 'sonar-pro' => [ + 'class' => Perplexity::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + ], + ], + 'sonar-reasoning' => [ + 'class' => Perplexity::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + ], + ], + 'sonar-reasoning-pro' => [ + 'class' => Perplexity::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::INPUT_IMAGE, + ], + ], + 'sonar-deep-research' => [ + 'class' => Perplexity::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + // Note: sonar-deep-research does not support INPUT_IMAGE + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Perplexity/Perplexity.php b/src/platform/src/Bridge/Perplexity/Perplexity.php index 38874c3d9..1b8fa8bb1 100644 --- a/src/platform/src/Bridge/Perplexity/Perplexity.php +++ b/src/platform/src/Bridge/Perplexity/Perplexity.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Perplexity; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,29 +18,4 @@ */ final class Perplexity extends Model { - public const SONAR = 'sonar'; - public const SONAR_PRO = 'sonar-pro'; - public const SONAR_REASONING = 'sonar-reasoning'; - public const SONAR_REASONING_PRO = 'sonar-reasoning-pro'; - public const SONAR_DEEP_RESEARCH = 'sonar-deep-research'; - - /** - * @param array $options - */ - public function __construct(string $name, array $options = []) - { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::INPUT_PDF, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - Capability::OUTPUT_STRUCTURED, - ]; - - if (self::SONAR_DEEP_RESEARCH !== $name) { - $capabilities[] = Capability::INPUT_IMAGE; - } - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/Perplexity/PlatformFactory.php b/src/platform/src/Bridge/Perplexity/PlatformFactory.php index 2e8b03799..83241386c 100644 --- a/src/platform/src/Bridge/Perplexity/PlatformFactory.php +++ b/src/platform/src/Bridge/Perplexity/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\Perplexity\Contract\PerplexityContract; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use 
Symfony\Contracts\HttpClient\HttpClientInterface; @@ -25,6 +26,7 @@ final class PlatformFactory public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -32,6 +34,7 @@ public static function create( return new Platform( [new ModelClient($httpClient, $apiKey)], [new ResultConverter()], + $modelCatalog, $contract ?? PerplexityContract::create(), ); } diff --git a/src/platform/src/Bridge/Replicate/ModelCatalog.php b/src/platform/src/Bridge/Replicate/ModelCatalog.php new file mode 100644 index 000000000..9958d4fc8 --- /dev/null +++ b/src/platform/src/Bridge/Replicate/ModelCatalog.php @@ -0,0 +1,138 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\Replicate; + +use Symfony\AI\Platform\Bridge\Meta\Llama; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'llama-3.3-70B-Instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-90b-vision-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-11b-vision-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-3b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-3b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-1b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.2-1b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-405b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-70b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-70b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-8b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3.1-8b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-70b' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-8b-instruct' => [ + 'class' => Llama::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + 'llama-3-8b' => [ + 'class' => Llama::class, + 'capabilities' => [ + 
Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Replicate/PlatformFactory.php b/src/platform/src/Bridge/Replicate/PlatformFactory.php index 739010630..d1c764cec 100644 --- a/src/platform/src/Bridge/Replicate/PlatformFactory.php +++ b/src/platform/src/Bridge/Replicate/PlatformFactory.php @@ -13,6 +13,7 @@ use Symfony\AI\Platform\Bridge\Replicate\Contract\LlamaMessageBagNormalizer; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\Clock\Clock; use Symfony\Component\HttpClient\HttpClient; @@ -26,11 +27,13 @@ final class PlatformFactory public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { return new Platform( [new LlamaModelClient(new Client($httpClient ?? HttpClient::create(), new Clock(), $apiKey))], [new LlamaResultConverter()], + $modelCatalog, $contract ?? Contract::create(new LlamaMessageBagNormalizer()), ); } diff --git a/src/platform/src/Bridge/Scaleway/Embeddings.php b/src/platform/src/Bridge/Scaleway/Embeddings.php index bf66e5c93..adc2d7fc6 100644 --- a/src/platform/src/Bridge/Scaleway/Embeddings.php +++ b/src/platform/src/Bridge/Scaleway/Embeddings.php @@ -18,13 +18,11 @@ */ final class Embeddings extends Model { - public const BAAI_BGE = 'bge-multilingual-gemma2'; - /** * @param array $options */ - public function __construct(string $name = self::BAAI_BGE, array $options = []) + public function __construct(string $name, array $capabilities = [], array $options = []) { - parent::__construct($name, [], $options); + parent::__construct($name, $capabilities, $options); } } diff --git a/src/platform/src/Bridge/Scaleway/ModelCatalog.php b/src/platform/src/Bridge/Scaleway/ModelCatalog.php new file mode 100644 index 000000000..b69803e11 --- /dev/null +++ b/src/platform/src/Bridge/Scaleway/ModelCatalog.php @@ -0,0 +1,147 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
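As the Replicate catalog above shows, all of its entries reuse the Meta bridge's Llama model class. A quick sketch of how a name resolves through the catalog:

    use Symfony\AI\Platform\Bridge\Meta\Llama;
    use Symfony\AI\Platform\Bridge\Replicate\ModelCatalog;

    $model = (new ModelCatalog())->getModel('llama-3.1-8b-instruct');

    // Catalog entries are hydrated through the class they declare.
    assert($model instanceof Llama);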
+ */ + +namespace Symfony\AI\Platform\Bridge\Scaleway; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'deepseek-r1-distill-llama-70b' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gemma-3-27b-it' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'llama-3.1-8b-instruct' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'llama-3.3-70b-instruct' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'devstral-small-2505' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'mistral-nemo-instruct-2407' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'pixtral-12b-2409' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'mistral-small-3.2-24b-instruct-2506' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'gpt-oss-120b' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'qwen3-coder-30b-a3b-instruct' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'qwen3-235b-a22b-instruct-2507' => [ + 'class' => Scaleway::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::TOOL_CALLING, + Capability::OUTPUT_STRUCTURED, + ], + ], + 'bge-multilingual-gemma2' => [ + 'class' => Embeddings::class, + 'capabilities' => [Capability::INPUT_TEXT], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Scaleway/PlatformFactory.php b/src/platform/src/Bridge/Scaleway/PlatformFactory.php index 15edfa255..190490317 100644 --- a/src/platform/src/Bridge/Scaleway/PlatformFactory.php +++ 
b/src/platform/src/Bridge/Scaleway/PlatformFactory.php @@ -16,6 +16,7 @@ use Symfony\AI\Platform\Bridge\Scaleway\Llm\ModelClient as ScalewayModelClient; use Symfony\AI\Platform\Bridge\Scaleway\Llm\ResultConverter as ScalewayResponseConverter; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -28,6 +29,7 @@ final class PlatformFactory public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); @@ -41,6 +43,7 @@ public static function create( new ScalewayResponseConverter(), new ScalewayEmbeddingsResponseConverter(), ], + $modelCatalog, $contract, ); } diff --git a/src/platform/src/Bridge/Scaleway/Scaleway.php b/src/platform/src/Bridge/Scaleway/Scaleway.php index 42f9f3050..95448dfb9 100644 --- a/src/platform/src/Bridge/Scaleway/Scaleway.php +++ b/src/platform/src/Bridge/Scaleway/Scaleway.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Scaleway; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,34 +18,14 @@ */ final class Scaleway extends Model { - public const DEEPSEEK = 'deepseek-r1-distill-llama-70b'; - public const GOOGLE_GEMMA = 'gemma-3-27b-it'; - public const META_LLAMA_8B = 'llama-3.1-8b-instruct'; - public const META_LLAMA_70B = 'llama-3.3-70b-instruct'; - public const MISTRAL_DEVSTRAL = 'devstral-small-2505'; - public const MISTRAL_NEMO = 'mistral-nemo-instruct-2407'; - public const MISTRAL_PIXTRAL = 'pixtral-12b-2409'; - public const MISTRAL_SMALL = 'mistral-small-3.2-24b-instruct-2506'; - public const OPENAI_OSS = 'gpt-oss-120b'; - public const QWEN_CODE = 'qwen3-coder-30b-a3b-instruct'; - public const QWEN_INSTRUCT = 'qwen3-235b-a22b-instruct-2507'; - /** * @param array $options */ public function __construct( string $name, + array $capabilities = [], array $options = [], ) { - $capabilities = [ - Capability::INPUT_IMAGE, - Capability::INPUT_MESSAGES, - Capability::OUTPUT_STREAMING, - Capability::OUTPUT_STRUCTURED, - Capability::OUTPUT_TEXT, - Capability::TOOL_CALLING, - ]; - parent::__construct($name, $capabilities, $options); } } diff --git a/src/platform/src/Bridge/TransformersPhp/ModelCatalog.php b/src/platform/src/Bridge/TransformersPhp/ModelCatalog.php new file mode 100644 index 000000000..b846d5443 --- /dev/null +++ b/src/platform/src/Bridge/TransformersPhp/ModelCatalog.php @@ -0,0 +1,23 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
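A usage sketch for the Scaleway changes above (the environment variable and prompt are assumptions): one platform now serves both the chat models mapped to Scaleway::class and the BGE embeddings model mapped to Embeddings::class, selected purely by name.

    use Symfony\AI\Platform\Bridge\Scaleway\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;

    $platform = PlatformFactory::create($_ENV['SCALEWAY_SECRET_KEY']);

    $chat = $platform->invoke('llama-3.3-70b-instruct', new MessageBag(Message::ofUser('Bonjour!')));
    $vector = $platform->invoke('bge-multilingual-gemma2', 'Symfony AI');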
+ */ + +namespace Symfony\AI\Platform\Bridge\TransformersPhp; + +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; + +/** + * @author Oskar Stark + */ +final class ModelCatalog extends DynamicModelCatalog +{ + // TransformersPhp can use various models from HuggingFace + // dynamically loaded through transformers.php library +} diff --git a/src/platform/src/Bridge/TransformersPhp/PlatformFactory.php b/src/platform/src/Bridge/TransformersPhp/PlatformFactory.php index 00fca1b88..dcb0ac782 100644 --- a/src/platform/src/Bridge/TransformersPhp/PlatformFactory.php +++ b/src/platform/src/Bridge/TransformersPhp/PlatformFactory.php @@ -13,6 +13,7 @@ use Codewithkyrian\Transformers\Transformers; use Symfony\AI\Platform\Exception\RuntimeException; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; /** @@ -20,12 +21,12 @@ */ final readonly class PlatformFactory { - public static function create(): Platform + public static function create(ModelCatalogInterface $modelCatalog = new ModelCatalog()): Platform { if (!class_exists(Transformers::class)) { throw new RuntimeException('For using the TransformersPHP with FFI to run models in PHP, the codewithkyrian/transformers package is required. Try running "composer require codewithkyrian/transformers".'); } - return new Platform([new ModelClient()], [new ResultConverter()]); + return new Platform([new ModelClient()], [new ResultConverter()], $modelCatalog); } } diff --git a/src/platform/src/Bridge/VertexAi/Embeddings/Model.php b/src/platform/src/Bridge/VertexAi/Embeddings/Model.php index 1bab2d68d..45a901032 100644 --- a/src/platform/src/Bridge/VertexAi/Embeddings/Model.php +++ b/src/platform/src/Bridge/VertexAi/Embeddings/Model.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\VertexAi\Embeddings; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model as BaseModel; /** @@ -19,18 +18,4 @@ */ final class Model extends BaseModel { - /** Upto 3072 dimensions */ - public const GEMINI_EMBEDDING_001 = 'gemini-embedding-001'; - /** Upto 768 dimensions */ - public const TEXT_EMBEDDING_005 = 'text-embedding-005'; - /** Upto 768 dimensions */ - public const TEXT_MULTILINGUAL_EMBEDDING_002 = 'text-multilingual-embedding-002'; - - /** - * @see https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api for various options - */ - public function __construct(string $name, array $options = []) - { - parent::__construct($name, [Capability::INPUT_TEXT, Capability::INPUT_MULTIPLE], $options); - } } diff --git a/src/platform/src/Bridge/VertexAi/Gemini/Model.php b/src/platform/src/Bridge/VertexAi/Gemini/Model.php index 8b83a0622..b16787313 100644 --- a/src/platform/src/Bridge/VertexAi/Gemini/Model.php +++ b/src/platform/src/Bridge/VertexAi/Gemini/Model.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\VertexAi\Gemini; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model as BaseModel; /** @@ -19,29 +18,4 @@ */ final class Model extends BaseModel { - public const GEMINI_2_5_PRO = 'gemini-2.5-pro'; - public const GEMINI_2_5_FLASH = 'gemini-2.5-flash'; - public const GEMINI_2_0_FLASH = 'gemini-2.0-flash'; - public const GEMINI_2_5_FLASH_LITE = 'gemini-2.5-flash-lite'; - public const GEMINI_2_0_FLASH_LITE = 'gemini-2.0-flash-lite'; - - /** - * @param array $options The default options for the model usage - * - * @see https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/inference for more details - */ - public function __construct(string $name, 
array $options = []) - { - $capabilities = [ - Capability::INPUT_MESSAGES, - Capability::INPUT_IMAGE, - Capability::INPUT_AUDIO, - Capability::INPUT_PDF, - Capability::OUTPUT_STREAMING, - Capability::OUTPUT_STRUCTURED, - Capability::TOOL_CALLING, - ]; - - parent::__construct($name, $capabilities, $options); - } } diff --git a/src/platform/src/Bridge/VertexAi/ModelCatalog.php b/src/platform/src/Bridge/VertexAi/ModelCatalog.php new file mode 100644 index 000000000..f6bc3074c --- /dev/null +++ b/src/platform/src/Bridge/VertexAi/ModelCatalog.php @@ -0,0 +1,125 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\VertexAi; + +use Symfony\AI\Platform\Bridge\VertexAi\Embeddings\Model as EmbeddingsModel; +use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model as GeminiModel; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +/** + * @see https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/inference for more details + * @see https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api for various options + * + * @author Oskar Stark + */ +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + // Gemini models + 'gemini-2.5-pro' => [ + 'class' => GeminiModel::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.5-flash' => [ + 'class' => GeminiModel::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.0-flash' => [ + 'class' => GeminiModel::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.5-flash-lite' => [ + 'class' => GeminiModel::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + 'gemini-2.0-flash-lite' => [ + 'class' => GeminiModel::class, + 'capabilities' => [ + Capability::INPUT_MESSAGES, + Capability::INPUT_IMAGE, + Capability::INPUT_AUDIO, + Capability::INPUT_PDF, + Capability::OUTPUT_TEXT, + Capability::OUTPUT_STREAMING, + Capability::OUTPUT_STRUCTURED, + Capability::TOOL_CALLING, + ], + ], + // Embeddings models + 'gemini-embedding-001' => [ + 'class' => EmbeddingsModel::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::INPUT_MULTIPLE, + ], + ], + 'text-embedding-005' => [ + 'class' => EmbeddingsModel::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::INPUT_MULTIPLE, + ], + ], + 'text-multilingual-embedding-002' => [ + 'class' => EmbeddingsModel::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + 
Capability::INPUT_MULTIPLE, + ], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/VertexAi/PlatformFactory.php b/src/platform/src/Bridge/VertexAi/PlatformFactory.php index 7685cd9a0..657ccd7ab 100644 --- a/src/platform/src/Bridge/VertexAi/PlatformFactory.php +++ b/src/platform/src/Bridge/VertexAi/PlatformFactory.php @@ -19,6 +19,7 @@ use Symfony\AI\Platform\Bridge\VertexAi\Gemini\ResultConverter as GeminiResultConverter; use Symfony\AI\Platform\Contract; use Symfony\AI\Platform\Exception\RuntimeException; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -32,6 +33,7 @@ public static function create( string $location, string $projectId, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { if (!class_exists(ApplicationDefaultCredentials::class)) { @@ -43,6 +45,7 @@ public static function create( return new Platform( [new GeminiModelClient($httpClient, $location, $projectId), new EmbeddingsModelClient($httpClient, $location, $projectId)], [new GeminiResultConverter(), new EmbeddingsResultConverter()], + $modelCatalog, $contract ?? GeminiContract::create(), ); } diff --git a/src/platform/src/Bridge/Voyage/ModelCatalog.php b/src/platform/src/Bridge/Voyage/ModelCatalog.php new file mode 100644 index 000000000..ed31fa908 --- /dev/null +++ b/src/platform/src/Bridge/Voyage/ModelCatalog.php @@ -0,0 +1,69 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\Voyage; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; + +final class ModelCatalog extends AbstractModelCatalog +{ + /** + * @param array}> $additionalModels + */ + public function __construct(array $additionalModels = []) + { + $defaultModels = [ + 'voyage-3.5' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-3.5-lite' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-3' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-3-lite' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-3-large' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-finance-2' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-multilingual-2' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-law-2' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-code-3' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + 'voyage-code-2' => [ + 'class' => Voyage::class, + 'capabilities' => [Capability::INPUT_MULTIPLE], + ], + ]; + + $this->models = array_merge($defaultModels, $additionalModels); + } +} diff --git a/src/platform/src/Bridge/Voyage/PlatformFactory.php b/src/platform/src/Bridge/Voyage/PlatformFactory.php index 8fe9df55e..f6a652a4f 100644 --- a/src/platform/src/Bridge/Voyage/PlatformFactory.php +++ b/src/platform/src/Bridge/Voyage/PlatformFactory.php @@ -12,6 +12,7 @@ 
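A sketch for the Vertex AI catalog above, assuming Google application default credentials are already configured; the location, project id, and prompt are placeholders:

    use Symfony\AI\Platform\Bridge\VertexAi\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;

    $platform = PlatformFactory::create('europe-west1', 'my-gcp-project');

    $result = $platform->invoke('gemini-2.5-flash', new MessageBag(Message::ofUser('Hello from Vertex AI!')));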
namespace Symfony\AI\Platform\Bridge\Voyage; use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Platform; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -24,10 +25,11 @@ final class PlatformFactory public static function create( #[\SensitiveParameter] string $apiKey, ?HttpClientInterface $httpClient = null, + ModelCatalogInterface $modelCatalog = new ModelCatalog(), ?Contract $contract = null, ): Platform { $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient); - return new Platform([new ModelClient($httpClient, $apiKey)], [new ResultConverter()], $contract); + return new Platform([new ModelClient($httpClient, $apiKey)], [new ResultConverter()], $modelCatalog, $contract); } } diff --git a/src/platform/src/Bridge/Voyage/Voyage.php b/src/platform/src/Bridge/Voyage/Voyage.php index 1d991139b..59d07d72a 100644 --- a/src/platform/src/Bridge/Voyage/Voyage.php +++ b/src/platform/src/Bridge/Voyage/Voyage.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Bridge\Voyage; -use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; /** @@ -19,25 +18,14 @@ */ class Voyage extends Model { - public const V3_5 = 'voyage-3.5'; - public const V3_5_LITE = 'voyage-3.5-lite'; - public const V3 = 'voyage-3'; - public const V3_LITE = 'voyage-3-lite'; - public const V3_LARGE = 'voyage-3-large'; - public const FINANCE_2 = 'voyage-finance-2'; - public const MULTILINGUAL_2 = 'voyage-multilingual-2'; - public const LAW_2 = 'voyage-law-2'; - public const CODE_3 = 'voyage-code-3'; - public const CODE_2 = 'voyage-code-2'; - public const INPUT_TYPE_DOCUMENT = 'document'; public const INPUT_TYPE_QUERY = 'query'; /** * @param array{dimensions?: int, input_type?: self::INPUT_TYPE_*, truncation?: bool} $options */ - public function __construct(string $name, array $options = []) + public function __construct(string $name, array $capabilities = [], array $options = []) { - parent::__construct($name, [Capability::INPUT_MULTIPLE], $options); + parent::__construct($name, $capabilities, $options); } } diff --git a/src/platform/src/InMemoryPlatform.php b/src/platform/src/InMemoryPlatform.php index 7d1ee744f..548eba99a 100644 --- a/src/platform/src/InMemoryPlatform.php +++ b/src/platform/src/InMemoryPlatform.php @@ -11,6 +11,8 @@ namespace Symfony\AI\Platform; +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Result\InMemoryRawResult; use Symfony\AI\Platform\Result\ResultInterface; use Symfony\AI\Platform\Result\ResultPromise; @@ -25,16 +27,25 @@ */ class InMemoryPlatform implements PlatformInterface { + private readonly ModelCatalogInterface $modelCatalog; + /** * The mock result can be a string or a callable that returns a string. * If it's a closure, it receives the model, input, and optionally options as parameters like a real platform call. 
*/ public function __construct(private readonly \Closure|string $mockResult) { + $this->modelCatalog = new DynamicModelCatalog(); } - public function invoke(Model $model, array|string|object $input, array $options = []): ResultPromise + public function invoke(string $model, array|string|object $input, array $options = []): ResultPromise { + $model = new class($model) extends Model { + public function __construct(string $name) + { + parent::__construct($name); + } + }; $result = \is_string($this->mockResult) ? $this->mockResult : ($this->mockResult)($model, $input, $options); if ($result instanceof ResultInterface) { @@ -44,6 +55,11 @@ public function invoke(Model $model, array|string|object $input, array $options return $this->createPromise(new TextResult($result), $options); } + public function getModelCatalog(): ModelCatalogInterface + { + return $this->modelCatalog; + } + /** * Creates a ResultPromise from a ResultInterface. * diff --git a/src/platform/src/Model.php b/src/platform/src/Model.php index 62069f222..86b4f397b 100644 --- a/src/platform/src/Model.php +++ b/src/platform/src/Model.php @@ -21,7 +21,7 @@ class Model /** * @param non-empty-string $name * @param Capability[] $capabilities - * @param array $options + * @param array $options The default options for the model usage */ public function __construct( private readonly string $name, diff --git a/src/platform/src/ModelCatalog/AbstractModelCatalog.php b/src/platform/src/ModelCatalog/AbstractModelCatalog.php new file mode 100644 index 000000000..ad3b337c6 --- /dev/null +++ b/src/platform/src/ModelCatalog/AbstractModelCatalog.php @@ -0,0 +1,93 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\ModelCatalog; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Exception\InvalidArgumentException; +use Symfony\AI\Platform\Exception\ModelNotFoundException; +use Symfony\AI\Platform\Model; + +/** + * @author Oskar Stark + */ +abstract class AbstractModelCatalog implements ModelCatalogInterface +{ + /** + * @var array}> + */ + protected array $models; + + public function getModel(string $modelName): Model + { + if ('' === $modelName) { + throw new InvalidArgumentException('Model name cannot be empty.'); + } + + $parsed = self::parseModelName($modelName); + $actualModelName = $parsed['name']; + $options = $parsed['options']; + + if (!isset($this->models[$actualModelName])) { + throw new ModelNotFoundException(\sprintf('Model "%s" not found.', $actualModelName)); + } + + $modelConfig = $this->models[$actualModelName]; + $modelClass = $modelConfig['class']; + + if (!class_exists($modelClass)) { + throw new InvalidArgumentException(\sprintf('Model class "%s" does not exist.', $modelClass)); + } + + $model = new $modelClass($actualModelName, $modelConfig['capabilities'], $options); + if (!$model instanceof Model) { + throw new InvalidArgumentException(\sprintf('Model class "%s" must extend "%s".', $modelClass, Model::class)); + } + + return $model; + } + + /** + * @return array}> + */ + public function getModels(): array + { + return $this->models; + } + + /** + * Extracts model name and options from a model name string that may contain query parameters. 
+ * + * @param string $modelName The model name, potentially with query parameters (e.g., "model-name?param=value&other=123") + * + * @return array{name: string, options: array} An array containing the model name and parsed options + */ + protected static function parseModelName(string $modelName): array + { + $options = []; + $actualModelName = $modelName; + + if (str_contains($modelName, '?')) { + [$actualModelName, $queryString] = explode('?', $modelName, 2); + + if ('' === $actualModelName) { + throw new InvalidArgumentException('Model name cannot be empty.'); + } + + parse_str($queryString, $options); + } + + return [ + 'name' => $actualModelName, + 'options' => $options, + ]; + } +} diff --git a/src/platform/src/ModelCatalog/DynamicModelCatalog.php b/src/platform/src/ModelCatalog/DynamicModelCatalog.php new file mode 100644 index 000000000..9e43c5c32 --- /dev/null +++ b/src/platform/src/ModelCatalog/DynamicModelCatalog.php @@ -0,0 +1,39 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\ModelCatalog; + +/* + * A dynamic model catalog that accepts any model name and creates models with all capabilities. + * + * This class is useful for platforms that support a wide range of models dynamically + * without needing to predefine them in a static catalog. Since we don't know what specific + * capabilities each dynamic model supports, we provide all capabilities by default. + * + * @author Oskar Stark + */ +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Model; + +class DynamicModelCatalog extends AbstractModelCatalog +{ + public function __construct() + { + $this->models = []; + } + + public function getModel(string $modelName): Model + { + $parsed = self::parseModelName($modelName); + + return new Model($parsed['name'], Capability::cases(), $parsed['options']); + } +} diff --git a/src/platform/src/ModelCatalog/ModelCatalogInterface.php b/src/platform/src/ModelCatalog/ModelCatalogInterface.php new file mode 100644 index 000000000..b57571c1a --- /dev/null +++ b/src/platform/src/ModelCatalog/ModelCatalogInterface.php @@ -0,0 +1,31 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\ModelCatalog; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Model; + +/** + * @author Oskar Stark + */ +interface ModelCatalogInterface +{ + /** + * @param non-empty-string $modelName + */ + public function getModel(string $modelName): Model; + + /** + * @return array}> + */ + public function getModels(): array; +} diff --git a/src/platform/src/Platform.php b/src/platform/src/Platform.php index de5a16dc9..b3c19ff3c 100644 --- a/src/platform/src/Platform.php +++ b/src/platform/src/Platform.php @@ -12,6 +12,7 @@ namespace Symfony\AI\Platform; use Symfony\AI\Platform\Exception\RuntimeException; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\Result\RawResultInterface; use Symfony\AI\Platform\Result\ResultPromise; @@ -37,6 +38,7 @@ final class Platform implements PlatformInterface public function __construct( iterable $modelClients, iterable $resultConverters, + private ModelCatalogInterface $modelCatalog, private ?Contract $contract = null, ) { $this->contract = $contract ?? 
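The query-string handling in AbstractModelCatalog::parseModelName() above means per-call options can be appended to the model name. A sketch using the Voyage catalog; the option values are assumptions and arrive as strings because they are parsed with parse_str():

    use Symfony\AI\Platform\Bridge\Voyage\ModelCatalog;

    $model = (new ModelCatalog())->getModel('voyage-3.5?input_type=query&dimensions=1024');

    // $model->getOptions() now contains ['input_type' => 'query', 'dimensions' => '1024'].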
diff --git a/src/platform/src/Platform.php b/src/platform/src/Platform.php
index de5a16dc9..b3c19ff3c 100644
--- a/src/platform/src/Platform.php
+++ b/src/platform/src/Platform.php
@@ -12,6 +12,7 @@
 namespace Symfony\AI\Platform;

 use Symfony\AI\Platform\Exception\RuntimeException;
+use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface;
 use Symfony\AI\Platform\Result\RawResultInterface;
 use Symfony\AI\Platform\Result\ResultPromise;

@@ -37,6 +38,7 @@ final class Platform implements PlatformInterface
     public function __construct(
         iterable $modelClients,
         iterable $resultConverters,
+        private ModelCatalogInterface $modelCatalog,
         private ?Contract $contract = null,
     ) {
         $this->contract = $contract ?? Contract::create();
@@ -44,8 +46,9 @@ public function __construct(
         $this->resultConverters = $resultConverters instanceof \Traversable ? iterator_to_array($resultConverters) : $resultConverters;
     }

-    public function invoke(Model $model, array|string|object $input, array $options = []): ResultPromise
+    public function invoke(string $model, array|string|object $input, array $options = []): ResultPromise
     {
+        $model = $this->modelCatalog->getModel($model);
         $payload = $this->contract->createRequestPayload($model, $input);
         $options = array_merge($model->getOptions(), $options);

@@ -58,6 +61,11 @@ public function invoke(Model $model, array|string|object $input, array $options
         return $this->convertResult($model, $result, $options);
     }

+    public function getModelCatalog(): ModelCatalogInterface
+    {
+        return $this->modelCatalog;
+    }
+
     /**
      * @param array $payload
      * @param array $options
diff --git a/src/platform/src/PlatformInterface.php b/src/platform/src/PlatformInterface.php
index 927a77153..e6dcfbd36 100644
--- a/src/platform/src/PlatformInterface.php
+++ b/src/platform/src/PlatformInterface.php
@@ -11,6 +11,7 @@

 namespace Symfony\AI\Platform;

+use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface;
 use Symfony\AI\Platform\Result\ResultPromise;

 /**
@@ -19,8 +20,11 @@ interface PlatformInterface
 {
     /**
-     * @param array|string|object $input
-     * @param array $options
+     * @param non-empty-string $model The model name
+     * @param array|string|object $input The input data
+     * @param array $options The options to customize the model invocation
      */
-    public function invoke(Model $model, array|string|object $input, array $options = []): ResultPromise;
+    public function invoke(string $model, array|string|object $input, array $options = []): ResultPromise;
+
+    public function getModelCatalog(): ModelCatalogInterface;
 }
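How the new string-based signature reads for callers, as a sketch (the model name and options are illustrative):

    // Before this change: the caller constructed the model object itself.
    $result = $platform->invoke(new Claude('claude-3-5-sonnet-latest'), $messages);

    // After: the caller passes the model name and the platform resolves it through its
    // catalog, including any query-string options.
    $result = $platform->invoke('claude-3-5-sonnet-latest?max_tokens=2000', $messages);
    $catalog = $platform->getModelCatalog();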
diff --git a/src/platform/src/Tests/ModelCatalogTestCase.php b/src/platform/src/Tests/ModelCatalogTestCase.php
new file mode 100644
index 000000000..7fb31764e
--- /dev/null
+++ b/src/platform/src/Tests/ModelCatalogTestCase.php
@@ -0,0 +1,123 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Tests;
+
+use PHPUnit\Framework\Attributes\DataProvider;
+use PHPUnit\Framework\TestCase;
+use Symfony\AI\Platform\Capability;
+use Symfony\AI\Platform\Exception\ModelNotFoundException;
+use Symfony\AI\Platform\Model;
+use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog;
+use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface;
+
+/**
+ * Base test case for testing ModelCatalog implementations.
+ *
+ * @author Oskar Stark
+ */
+abstract class ModelCatalogTestCase extends TestCase
+{
+    /**
+     * @return iterable<string, array{string, class-string<Model>, list<Capability>}>
+     */
+    abstract public static function modelsProvider(): iterable;
+
+    /**
+     * @param class-string<Model> $expectedClass
+     * @param list<Capability>    $expectedCapabilities
+     */
+    #[DataProvider('modelsProvider')]
+    public function testGetModel(string $modelName, string $expectedClass, array $expectedCapabilities)
+    {
+        $catalog = $this->createModelCatalog();
+        $model = $catalog->getModel($modelName);
+
+        $this->assertInstanceOf(Model::class, $model);
+        $this->assertInstanceOf($expectedClass, $model);
+        $this->assertSame($modelName, $model->getName());
+
+        // Check capabilities
+        $actualCapabilities = $model->getCapabilities();
+        sort($expectedCapabilities);
+        sort($actualCapabilities);
+
+        $this->assertSame(
+            $expectedCapabilities,
+            $actualCapabilities,
+            \sprintf('Model "%s" capabilities do not match expected', $modelName)
+        );
+    }
+
+    public function testGetModelThrowsExceptionForUnknownModel()
+    {
+        $catalog = $this->createModelCatalog();
+
+        // Skip this test for catalogs that accept any model (like DynamicModelCatalog)
+        if ($catalog instanceof DynamicModelCatalog) {
+            $this->markTestSkipped('This catalog accepts any model name');
+        }
+
+        $this->expectException(ModelNotFoundException::class);
+        $this->expectExceptionMessage('Model "unknown-model-that-does-not-exist" not found');
+
+        $catalog->getModel('unknown-model-that-does-not-exist');
+    }
+
+    public function testGetModels()
+    {
+        $catalog = $this->createModelCatalog();
+        $models = $catalog->getModels();
+
+        // Skip this test for catalogs that accept any model (like DynamicModelCatalog)
+        if ($catalog instanceof DynamicModelCatalog) {
+            $this->markTestSkipped('This catalog accepts any model name');
+        }
+
+        foreach ($models as $modelName => $modelDefinition) {
+            $this->assertIsString($modelName);
+            $this->assertArrayHasKey('class', $modelDefinition);
+            $this->assertArrayHasKey('capabilities', $modelDefinition);
+            $this->assertIsArray($modelDefinition['capabilities']);
+
+            // Verify each capability is valid
+            foreach ($modelDefinition['capabilities'] as $capability) {
+                $this->assertInstanceOf(Capability::class, $capability);
+            }
+        }
+    }
+
+    public function testAllModelsHaveValidClass()
+    {
+        $catalog = $this->createModelCatalog();
+
+        // Skip this test for catalogs that accept any model (like DynamicModelCatalog)
+        if ($catalog instanceof DynamicModelCatalog) {
+            $this->markTestSkipped('This catalog accepts any model name');
+        }
+
+        $models = $catalog->getModels();
+
+        foreach ($models as $modelName => $modelDefinition) {
+            $this->assertArrayHasKey('class', $modelDefinition, \sprintf('Model "%s" missing class', $modelName));
+            $this->assertTrue(
+                class_exists($modelDefinition['class']),
+                \sprintf('Model "%s" has non-existent class "%s"', $modelName, $modelDefinition['class'])
+            );
+            $this->assertTrue(
+                is_subclass_of($modelDefinition['class'], Model::class) || Model::class === $modelDefinition['class'],
+                \sprintf('Model "%s" class "%s" must extend Model', $modelName, $modelDefinition['class'])
+            );
+        }
+    }
+
+    abstract protected function createModelCatalog(): ModelCatalogInterface;
+}
diff --git a/src/platform/tests/Bridge/AiMlApi/ModelCatalogTest.php b/src/platform/tests/Bridge/AiMlApi/ModelCatalogTest.php
new file mode 100644
index 000000000..6d4640463
--- /dev/null
+++ b/src/platform/tests/Bridge/AiMlApi/ModelCatalogTest.php
@@ -0,0 +1,174 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the
LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\AiMlApi; + +use Symfony\AI\Platform\Bridge\AiMlApi\Completions; +use Symfony\AI\Platform\Bridge\AiMlApi\Embeddings; +use Symfony\AI\Platform\Bridge\AiMlApi\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + // Completion models (GPT variants) + yield 'gpt-3.5-turbo' => ['gpt-3.5-turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-3.5-turbo-0125' => ['gpt-3.5-turbo-0125', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-3.5-turbo-1106' => ['gpt-3.5-turbo-1106', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4o' => ['gpt-4o', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-2024-08-06' => ['gpt-4o-2024-08-06', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-2024-05-13' => ['gpt-4o-2024-05-13', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-mini' => ['gpt-4o-mini', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-mini-2024-07-18' => ['gpt-4o-mini-2024-07-18', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4-turbo' => ['gpt-4-turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'gpt-4' => ['gpt-4', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4-turbo-2024-04-09' => ['gpt-4-turbo-2024-04-09', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4-0125-preview' => ['gpt-4-0125-preview', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4-1106-preview' => ['gpt-4-1106-preview', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'chatgpt-4o-latest' => ['chatgpt-4o-latest', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', 
Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-mini-audio-preview' => ['gpt-4o-mini-audio-preview', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-search-preview' => ['gpt-4o-search-preview', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-mini-search-preview' => ['gpt-4o-mini-search-preview', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'o1-mini' => ['o1-mini', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'o1-mini-2024-09-12' => ['o1-mini-2024-09-12', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'o1' => ['o1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'o3-mini' => ['o3-mini', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + + // OpenAI future models + yield 'openai/o3-2025-04-16' => ['openai/o3-2025-04-16', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'openai/o3-pro' => ['openai/o3-pro', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'openai/gpt-4.1-2025-04-14' => ['openai/gpt-4.1-2025-04-14', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-4.1-mini-2025-04-14' => ['openai/gpt-4.1-mini-2025-04-14', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-4.1-nano-2025-04-14' => ['openai/gpt-4.1-nano-2025-04-14', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/o4-mini-2025-04-16' => ['openai/o4-mini-2025-04-16', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'openai/gpt-oss-20b' => ['openai/gpt-oss-20b', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-oss-120b' => ['openai/gpt-oss-120b', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-5-2025-08-07' => ['openai/gpt-5-2025-08-07', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-5-mini-2025-08-07' => ['openai/gpt-5-mini-2025-08-07', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-5-nano-2025-08-07' => ['openai/gpt-5-nano-2025-08-07', Completions::class, 
[Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'openai/gpt-5-chat-latest' => ['openai/gpt-5-chat-latest', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // DeepSeek models + yield 'deepseek-chat' => ['deepseek-chat', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'deepseek/deepseek-chat' => ['deepseek/deepseek-chat', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'deepseek/deepseek-chat-v3-0324' => ['deepseek/deepseek-chat-v3-0324', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'deepseek/deepseek-r1' => ['deepseek/deepseek-r1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'deepseek-reasoner' => ['deepseek-reasoner', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'deepseek/deepseek-prover-v2' => ['deepseek/deepseek-prover-v2', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'deepseek/deepseek-chat-v3.1' => ['deepseek/deepseek-chat-v3.1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'deepseek/deepseek-reasoner-v3.1' => ['deepseek/deepseek-reasoner-v3.1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + + // Qwen models + yield 'Qwen/Qwen2-72B-Instruct' => ['Qwen/Qwen2-72B-Instruct', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'qwen-max' => ['qwen-max', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'qwen-plus' => ['qwen-plus', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'qwen-turbo' => ['qwen-turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'qwen-max-2025-01-25' => ['qwen-max-2025-01-25', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'Qwen/Qwen2.5-72B-Instruct-Turbo' => ['Qwen/Qwen2.5-72B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'Qwen/QwQ-32B' => ['Qwen/QwQ-32B', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'Qwen/Qwen3-235B-A22B-fp8-tput' => ['Qwen/Qwen3-235B-A22B-fp8-tput', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'alibaba/qwen3-32b' => ['alibaba/qwen3-32b', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'alibaba/qwen3-coder-480b-a35b-instruct' => ['alibaba/qwen3-coder-480b-a35b-instruct', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, 
Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'alibaba/qwen3-235b-a22b-thinking-2507' => ['alibaba/qwen3-235b-a22b-thinking-2507', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'Qwen/Qwen2.5-7B-Instruct-Turbo' => ['Qwen/Qwen2.5-7B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'Qwen/Qwen2.5-Coder-32B-Instruct' => ['Qwen/Qwen2.5-Coder-32B-Instruct', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // Mistral models + yield 'mistralai/Mixtral-8x7B-Instruct-v0.1' => ['mistralai/Mixtral-8x7B-Instruct-v0.1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'mistralai/Mistral-7B-Instruct-v0.2' => ['mistralai/Mistral-7B-Instruct-v0.2', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'mistralai/Mistral-7B-Instruct-v0.1' => ['mistralai/Mistral-7B-Instruct-v0.1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'mistralai/Mistral-7B-Instruct-v0.3' => ['mistralai/Mistral-7B-Instruct-v0.3', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'mistralai/mistral-tiny' => ['mistralai/mistral-tiny', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'mistralai/mistral-nemo' => ['mistralai/mistral-nemo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'mistralai/codestral-2501' => ['mistralai/codestral-2501', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // Meta Llama models + yield 'meta-llama/Llama-3.3-70B-Instruct-Turbo' => ['meta-llama/Llama-3.3-70B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/Llama-3.2-3B-Instruct-Turbo' => ['meta-llama/Llama-3.2-3B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/Meta-Llama-3-8B-Instruct-Lite' => ['meta-llama/Meta-Llama-3-8B-Instruct-Lite', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/Llama-3-70b-chat-hf' => ['meta-llama/Llama-3-70b-chat-hf', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' => ['meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' => ['meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 
'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' => ['meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/llama-4-scout' => ['meta-llama/llama-4-scout', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'meta-llama/llama-4-maverick' => ['meta-llama/llama-4-maverick', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // Claude models + yield 'claude-3-opus-20240229' => ['claude-3-opus-20240229', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-3-haiku-20240307' => ['claude-3-haiku-20240307', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-3-5-sonnet-20240620' => ['claude-3-5-sonnet-20240620', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-3-5-sonnet-20241022' => ['claude-3-5-sonnet-20241022', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-3-5-haiku-20241022' => ['claude-3-5-haiku-20241022', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-3-7-sonnet-20250219' => ['claude-3-7-sonnet-20250219', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'anthropic/claude-opus-4' => ['anthropic/claude-opus-4', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'anthropic/claude-sonnet-4' => ['anthropic/claude-sonnet-4', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'anthropic/claude-opus-4.1' => ['anthropic/claude-opus-4.1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-opus-4-1' => ['claude-opus-4-1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-opus-4-1-20250805' => ['claude-opus-4-1-20250805', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + + // Gemini models + yield 'gemini-2.0-flash-exp' => ['gemini-2.0-flash-exp', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'gemini-2.0-flash' => ['gemini-2.0-flash', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, 
Capability::INPUT_IMAGE]]; + yield 'google/gemini-2.5-flash-lite-preview' => ['google/gemini-2.5-flash-lite-preview', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'google/gemini-2.5-flash' => ['google/gemini-2.5-flash', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'google/gemini-2.5-pro' => ['google/gemini-2.5-pro', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'google/gemma-2-27b-it' => ['google/gemma-2-27b-it', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'google/gemma-3-4b-it' => ['google/gemma-3-4b-it', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'google/gemma-3-12b-it' => ['google/gemma-3-12b-it', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'google/gemma-3-27b-it' => ['google/gemma-3-27b-it', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'google/gemma-3n-e4b-it' => ['google/gemma-3n-e4b-it', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // X.AI models + yield 'x-ai/grok-3-beta' => ['x-ai/grok-3-beta', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'x-ai/grok-3-mini-beta' => ['x-ai/grok-3-mini-beta', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'x-ai/grok-4-07-09' => ['x-ai/grok-4-07-09', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // Other models + yield 'anthracite-org/magnum-v4-72b' => ['anthracite-org/magnum-v4-72b', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'nvidia/llama-3.1-nemotron-70b-instruct' => ['nvidia/llama-3.1-nemotron-70b-instruct', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'cohere/command-r-plus' => ['cohere/command-r-plus', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'cohere/command-a' => ['cohere/command-a', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'MiniMax-Text-01' => ['MiniMax-Text-01', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'minimax/m1' => ['minimax/m1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'moonshot/kimi-k2-preview' => ['moonshot/kimi-k2-preview', Completions::class, 
[Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'perplexity/sonar' => ['perplexity/sonar', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'perplexity/sonar-pro' => ['perplexity/sonar-pro', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'zhipu/glm-4.5-air' => ['zhipu/glm-4.5-air', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'zhipu/glm-4.5' => ['zhipu/glm-4.5', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + + // Embedding models + yield 'text-embedding-3-small' => ['text-embedding-3-small', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'text-embedding-3-large' => ['text-embedding-3-large', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'text-embedding-ada-002' => ['text-embedding-ada-002', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'togethercomputer/m2-bert-80M-32k-retrieval' => ['togethercomputer/m2-bert-80M-32k-retrieval', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'BAAI/bge-base-en-v1.5' => ['BAAI/bge-base-en-v1.5', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'BAAI/bge-large-en-v1.' => ['BAAI/bge-large-en-v1.', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-large-2-instruct' => ['voyage-large-2-instruct', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-finance-2' => ['voyage-finance-2', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-multilingual-2' => ['voyage-multilingual-2', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-law-2' => ['voyage-law-2', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-code-2' => ['voyage-code-2', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-large-2' => ['voyage-large-2', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-2' => ['voyage-2', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'textembedding-gecko@003' => ['textembedding-gecko@003', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'textembedding-gecko-multilingual@001' => ['textembedding-gecko-multilingual@001', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'text-multilingual-embedding-002' => ['text-multilingual-embedding-002', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Albert/ModelCatalogTest.php b/src/platform/tests/Bridge/Albert/ModelCatalogTest.php new file mode 100644 index 000000000..b48cc1f19 --- /dev/null +++ b/src/platform/tests/Bridge/Albert/ModelCatalogTest.php @@ -0,0 +1,34 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\Albert; + +use Symfony\AI\Platform\Bridge\Albert\ModelCatalog; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Anthropic/ClaudeTest.php b/src/platform/tests/Bridge/Anthropic/ClaudeTest.php index 7a9293524..a0b69bcbc 100644 --- a/src/platform/tests/Bridge/Anthropic/ClaudeTest.php +++ b/src/platform/tests/Bridge/Anthropic/ClaudeTest.php @@ -21,25 +21,25 @@ final class ClaudeTest extends TestCase { public function testItCreatesClaudeWithDefaultSettings() { - $claude = new Claude(Claude::SONNET_35); + $claude = new Claude('claude-3-5-sonnet-latest'); - $this->assertSame(Claude::SONNET_35, $claude->getName()); + $this->assertSame('claude-3-5-sonnet-latest', $claude->getName()); $this->assertSame(['max_tokens' => 1000], $claude->getOptions()); } public function testItCreatesClaudeWithCustomSettingsIncludingMaxTokens() { - $claude = new Claude(Claude::SONNET_35, ['temperature' => 0.5, 'max_tokens' => 2000]); + $claude = new Claude('claude-3-5-sonnet-latest', [], ['temperature' => 0.5, 'max_tokens' => 2000]); - $this->assertSame(Claude::SONNET_35, $claude->getName()); + $this->assertSame('claude-3-5-sonnet-latest', $claude->getName()); $this->assertSame(['temperature' => 0.5, 'max_tokens' => 2000], $claude->getOptions()); } public function testItCreatesClaudeWithCustomSettingsWithoutMaxTokens() { - $claude = new Claude(Claude::SONNET_35, ['temperature' => 0.5]); + $claude = new Claude('claude-3-5-sonnet-latest', [], ['temperature' => 0.5]); - $this->assertSame(Claude::SONNET_35, $claude->getName()); + $this->assertSame('claude-3-5-sonnet-latest', $claude->getName()); $this->assertSame(['temperature' => 0.5, 'max_tokens' => 1000], $claude->getOptions()); } } diff --git a/src/platform/tests/Bridge/Anthropic/Contract/AssistantMessageNormalizerTest.php b/src/platform/tests/Bridge/Anthropic/Contract/AssistantMessageNormalizerTest.php index 056b4ae37..a523652fb 100644 --- a/src/platform/tests/Bridge/Anthropic/Contract/AssistantMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/Anthropic/Contract/AssistantMessageNormalizerTest.php @@ -26,7 +26,7 @@ public function testSupportsNormalization() $normalizer = new AssistantMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new AssistantMessage('Hello'), context: [ - Contract::CONTEXT_MODEL => new Claude(Claude::SONNET_37), + Contract::CONTEXT_MODEL => new Claude('claude-3-5-sonnet-latest'), ])); $this->assertFalse($normalizer->supportsNormalization('not an assistant message')); } diff --git a/src/platform/tests/Bridge/Anthropic/ModelCatalogTest.php b/src/platform/tests/Bridge/Anthropic/ModelCatalogTest.php new file mode 100644 index 000000000..984f68be2 --- /dev/null +++ b/src/platform/tests/Bridge/Anthropic/ModelCatalogTest.php @@ -0,0 +1,44 @@ + + * + * For the full copyright and license information, please view the LICENSE + * 
file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\Anthropic; + +use Symfony\AI\Platform\Bridge\Anthropic\Claude; +use Symfony\AI\Platform\Bridge\Anthropic\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'claude-3-haiku-20240307' => ['claude-3-haiku-20240307', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-3-5-haiku-latest' => ['claude-3-5-haiku-latest', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-3-sonnet-20240229' => ['claude-3-sonnet-20240229', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-3-5-sonnet-latest' => ['claude-3-5-sonnet-latest', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-3-7-sonnet-latest' => ['claude-3-7-sonnet-latest', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-sonnet-4-20250514' => ['claude-sonnet-4-20250514', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-sonnet-4-0' => ['claude-sonnet-4-0', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-3-opus-20240229' => ['claude-3-opus-20240229', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-opus-4-20250514' => ['claude-opus-4-20250514', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-opus-4-0' => ['claude-opus-4-0', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'claude-opus-4-1' => ['claude-opus-4-1', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Anthropic/ModelClientTest.php b/src/platform/tests/Bridge/Anthropic/ModelClientTest.php index 6e6dbd025..d15870b5c 100644 --- a/src/platform/tests/Bridge/Anthropic/ModelClientTest.php +++ b/src/platform/tests/Bridge/Anthropic/ModelClientTest.php @@ -25,7 +25,7 @@ class ModelClientTest extends TestCase protected function setUp(): void { - $this->model = new Claude(Claude::SONNET_37); + $this->model = new Claude('claude-3-5-sonnet-latest'); } public function testAnthropicBetaHeaderIsSetWithSingleBetaFeature() diff --git 
a/src/platform/tests/Bridge/Azure/Meta/ModelCatalogTest.php b/src/platform/tests/Bridge/Azure/Meta/ModelCatalogTest.php new file mode 100644 index 000000000..6dd5eb52e --- /dev/null +++ b/src/platform/tests/Bridge/Azure/Meta/ModelCatalogTest.php @@ -0,0 +1,48 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\Azure\Meta; + +use Symfony\AI\Platform\Bridge\Azure\Meta\ModelCatalog; +use Symfony\AI\Platform\Bridge\Meta\Llama; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'llama-3.3-70B-Instruct' => ['llama-3.3-70B-Instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-90b-vision-instruct' => ['llama-3.2-90b-vision-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'llama-3.2-11b-vision-instruct' => ['llama-3.2-11b-vision-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::INPUT_IMAGE]]; + yield 'llama-3.2-3b' => ['llama-3.2-3b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-3b-instruct' => ['llama-3.2-3b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-1b' => ['llama-3.2-1b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-1b-instruct' => ['llama-3.2-1b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-405b-instruct' => ['llama-3.1-405b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-70b' => ['llama-3.1-70b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-70b-instruct' => ['llama-3-70b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-8b' => ['llama-3.1-8b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-8b-instruct' => ['llama-3.1-8b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-70b' => ['llama-3-70b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-8b-instruct' => ['llama-3-8b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-8b' => ['llama-3-8b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Azure/OpenAi/EmbeddingsModelClientTest.php b/src/platform/tests/Bridge/Azure/OpenAi/EmbeddingsModelClientTest.php index 491f876b4..d721de9dd 100644 --- a/src/platform/tests/Bridge/Azure/OpenAi/EmbeddingsModelClientTest.php +++ b/src/platform/tests/Bridge/Azure/OpenAi/EmbeddingsModelClientTest.php @@ -71,7 +71,7 @@ public function testItIsSupportingTheCorrectModel() { $client = new EmbeddingsModelClient(new MockHttpClient(), 'test.azure.com', 'deployment', '2023-12-01', 'api-key'); - $this->assertTrue($client->supports(new Embeddings(Embeddings::TEXT_3_SMALL))); + 
$this->assertTrue($client->supports(new Embeddings('text-embedding-3-small'))); } public function testItIsExecutingTheCorrectRequest() @@ -87,6 +87,6 @@ public function testItIsExecutingTheCorrectRequest() $httpClient = new MockHttpClient([$resultCallback]); $client = new EmbeddingsModelClient($httpClient, 'test.azure.com', 'embeddings-deployment', '2023-12-01', 'test-api-key'); - $client->request(new Embeddings(Embeddings::TEXT_3_SMALL), 'Hello, world!'); + $client->request(new Embeddings('text-embedding-3-small'), 'Hello, world!'); } } diff --git a/src/platform/tests/Bridge/Azure/OpenAi/GptModelClientTest.php b/src/platform/tests/Bridge/Azure/OpenAi/GptModelClientTest.php index 5f6ee9b57..b260847cc 100644 --- a/src/platform/tests/Bridge/Azure/OpenAi/GptModelClientTest.php +++ b/src/platform/tests/Bridge/Azure/OpenAi/GptModelClientTest.php @@ -71,7 +71,7 @@ public function testItIsSupportingTheCorrectModel() { $client = new GptModelClient(new MockHttpClient(), 'test.azure.com', 'deployment', '2023-12-01', 'api-key'); - $this->assertTrue($client->supports(new Gpt(Gpt::GPT_4O))); + $this->assertTrue($client->supports(new Gpt('gpt-4o'))); } public function testItIsExecutingTheCorrectRequest() @@ -87,6 +87,6 @@ public function testItIsExecutingTheCorrectRequest() $httpClient = new MockHttpClient([$resultCallback]); $client = new GptModelClient($httpClient, 'test.azure.com', 'gpt-deployment', '2023-12-01', 'test-api-key'); - $client->request(new Gpt(Gpt::GPT_4O), ['messages' => [['role' => 'user', 'content' => 'Hello']]]); + $client->request(new Gpt('gpt-4o'), ['messages' => [['role' => 'user', 'content' => 'Hello']]]); } } diff --git a/src/platform/tests/Bridge/Azure/OpenAi/ModelCatalogTest.php b/src/platform/tests/Bridge/Azure/OpenAi/ModelCatalogTest.php new file mode 100644 index 000000000..56cd34bd5 --- /dev/null +++ b/src/platform/tests/Bridge/Azure/OpenAi/ModelCatalogTest.php @@ -0,0 +1,45 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\Azure\OpenAi; + +use Symfony\AI\Platform\Bridge\Azure\OpenAi\ModelCatalog; +use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Bridge\OpenAi\Whisper; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_TEXT, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4o-mini' => ['gpt-4o-mini', Gpt::class, [Capability::INPUT_TEXT, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4-turbo' => ['gpt-4-turbo', Gpt::class, [Capability::INPUT_TEXT, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4' => ['gpt-4', Gpt::class, [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-35-turbo' => ['gpt-35-turbo', Gpt::class, [Capability::INPUT_TEXT, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'whisper' => ['whisper', Whisper::class, [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, Capability::SPEECH_TO_TEXT]]; + yield 'whisper-1' => ['whisper-1', Whisper::class, [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, Capability::SPEECH_TO_TEXT]]; + yield 'text-embedding-ada-002' => ['text-embedding-ada-002', Embeddings::class, [Capability::INPUT_TEXT, Capability::OUTPUT_STRUCTURED]]; + yield 'text-embedding-3-small' => ['text-embedding-3-small', Embeddings::class, [Capability::INPUT_TEXT, Capability::OUTPUT_STRUCTURED]]; + yield 'text-embedding-3-large' => ['text-embedding-3-large', Embeddings::class, [Capability::INPUT_TEXT, Capability::OUTPUT_STRUCTURED]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Azure/OpenAi/WhisperModelClientTest.php b/src/platform/tests/Bridge/Azure/OpenAi/WhisperModelClientTest.php index 43dbf27b7..40084b314 100644 --- a/src/platform/tests/Bridge/Azure/OpenAi/WhisperModelClientTest.php +++ b/src/platform/tests/Bridge/Azure/OpenAi/WhisperModelClientTest.php @@ -74,7 +74,7 @@ public function testItSupportsWhisperModel() '2023-12-01-preview', 'test-key' ); - $model = new Whisper(Whisper::WHISPER_1); + $model = new Whisper('whisper-1'); $this->assertTrue($client->supports($model)); } @@ -91,7 +91,7 @@ function ($method, $url): MockResponse { ]); $client = new WhisperModelClient($httpClient, 'test.azure.com', 'whspr', '2023-12', 'test-key'); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data']); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data']); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -108,7 +108,7 @@ function ($method, $url): MockResponse { ]); $client = new WhisperModelClient($httpClient, 'test.azure.com', 'whspr', '2023-12', 'test-key'); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data'], ['task' => Task::TRANSCRIPTION]); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data'], ['task' 
=> Task::TRANSCRIPTION]); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -125,7 +125,7 @@ function ($method, $url): MockResponse { ]); $client = new WhisperModelClient($httpClient, 'test.azure.com', 'whspr', '2023-12', 'test-key'); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data'], ['task' => Task::TRANSLATION]); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data'], ['task' => Task::TRANSLATION]); $this->assertSame(1, $httpClient->getRequestsCount()); } diff --git a/src/platform/tests/Bridge/Bedrock/ModelCatalogTest.php b/src/platform/tests/Bridge/Bedrock/ModelCatalogTest.php new file mode 100644 index 000000000..cafcb693a --- /dev/null +++ b/src/platform/tests/Bridge/Bedrock/ModelCatalogTest.php @@ -0,0 +1,39 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\Bedrock; + +use Symfony\AI\Platform\Bridge\Anthropic\Claude; +use Symfony\AI\Platform\Bridge\Bedrock\ModelCatalog; +use Symfony\AI\Platform\Bridge\Bedrock\Nova\Nova; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'nova-micro' => ['nova-micro', Nova::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'nova-lite' => ['nova-lite', Nova::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'nova-pro' => ['nova-pro', Nova::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'nova-premier' => ['nova-premier', Nova::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'claude-3-7-sonnet-20250219' => ['claude-3-7-sonnet-20250219', Claude::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Bedrock/Nova/ContractTest.php b/src/platform/tests/Bridge/Bedrock/Nova/ContractTest.php index d3a3874d6..505fa3483 100644 --- a/src/platform/tests/Bridge/Bedrock/Nova/ContractTest.php +++ b/src/platform/tests/Bridge/Bedrock/Nova/ContractTest.php @@ -37,7 +37,7 @@ public function testConvert(MessageBag $bag, array $expected) new UserMessageNormalizer(), ); - $this->assertEquals($expected, $contract->createRequestPayload(new Nova(Nova::PRO), $bag)); + $this->assertEquals($expected, $contract->createRequestPayload(new Nova('nova-pro'), $bag)); } /** diff --git a/src/platform/tests/Bridge/Bedrock/Nova/NovaResultConverterTest.php b/src/platform/tests/Bridge/Bedrock/Nova/NovaResultConverterTest.php index 1d921eb0d..44fd46d58 100644 --- a/src/platform/tests/Bridge/Bedrock/Nova/NovaResultConverterTest.php +++ b/src/platform/tests/Bridge/Bedrock/Nova/NovaResultConverterTest.php @@ -30,10 +30,8 @@ final class NovaResultConverterTest extends TestCase #[TestDox('Supports Nova model')] public function testSupports() { - $model = new Nova(Nova::PRO); - $converter = new NovaResultConverter(); - 
$this->assertTrue($converter->supports($model)); + $this->assertTrue($converter->supports(new Nova('nova-pro'))); } #[TestDox('Converts response with text content to TextResult')] diff --git a/src/platform/tests/Bridge/Cerebras/ModelCatalogTest.php b/src/platform/tests/Bridge/Cerebras/ModelCatalogTest.php new file mode 100644 index 000000000..fd5d7b858 --- /dev/null +++ b/src/platform/tests/Bridge/Cerebras/ModelCatalogTest.php @@ -0,0 +1,42 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\Cerebras; + +use Symfony\AI\Platform\Bridge\Cerebras\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'llama-4-scout-17b-16e-instruct' => ['llama-4-scout-17b-16e-instruct', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'llama3.1-8b' => ['llama3.1-8b', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'llama-3.3-70b' => ['llama-3.3-70b', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'llama-4-maverick-17b-128e-instruct' => ['llama-4-maverick-17b-128e-instruct', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'qwen-3-32b' => ['qwen-3-32b', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'qwen-3-235b-a22b-instruct-2507' => ['qwen-3-235b-a22b-instruct-2507', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'qwen-3-235b-a22b-thinking-2507' => ['qwen-3-235b-a22b-thinking-2507', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'qwen-3-coder-480b' => ['qwen-3-coder-480b', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + yield 'gpt-oss-120b' => ['gpt-oss-120b', Model::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Cerebras/ModelClientTest.php b/src/platform/tests/Bridge/Cerebras/ModelClientTest.php index b130375e5..f420dde73 100644 --- a/src/platform/tests/Bridge/Cerebras/ModelClientTest.php +++ b/src/platform/tests/Bridge/Cerebras/ModelClientTest.php @@ -50,7 +50,7 @@ public function testItSupportsTheCorrectModel() { $client = new ModelClient(new MockHttpClient(), 'csk-1234567890abcdef'); - $this->assertTrue($client->supports(new Model(Model::GPT_OSS_120B))); + $this->assertTrue($client->supports(new Model('llama3.1-8b'))); } public function testItSuccessfullyInvokesTheModel() @@ -76,7 +76,7 @@ public function testItSuccessfullyInvokesTheModel() ], ]; - $result = $client->request(new Model(Model::LLAMA_3_3_70B), $payload); + $result = $client->request(new Model('llama-3.3-70b'), $payload); $data = $result->getData(); $info = $result->getObject()->getInfo(); diff --git 
a/src/platform/tests/Bridge/Cerebras/ResultConverterTest.php b/src/platform/tests/Bridge/Cerebras/ResultConverterTest.php index b9da7612e..864e14aef 100644 --- a/src/platform/tests/Bridge/Cerebras/ResultConverterTest.php +++ b/src/platform/tests/Bridge/Cerebras/ResultConverterTest.php @@ -25,6 +25,6 @@ public function testItSupportsTheCorrectModel() { $client = new ModelClient(new MockHttpClient(), 'csk-1234567890abcdef'); - $this->assertTrue($client->supports(new Model(Model::GPT_OSS_120B))); + $this->assertTrue($client->supports(new Model('llama3.1-8b'))); } } diff --git a/src/platform/tests/Bridge/DockerModelRunner/ModelCatalogTest.php b/src/platform/tests/Bridge/DockerModelRunner/ModelCatalogTest.php new file mode 100644 index 000000000..7c22dcdbc --- /dev/null +++ b/src/platform/tests/Bridge/DockerModelRunner/ModelCatalogTest.php @@ -0,0 +1,57 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\DockerModelRunner; + +use Symfony\AI\Platform\Bridge\DockerModelRunner\Completions; +use Symfony\AI\Platform\Bridge\DockerModelRunner\Embeddings; +use Symfony\AI\Platform\Bridge\DockerModelRunner\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + // Completions models + yield 'ai/gemma3n' => ['ai/gemma3n', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/gemma3' => ['ai/gemma3', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/qwen2.5' => ['ai/qwen2.5', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/qwen3' => ['ai/qwen3', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/qwen3-coder' => ['ai/qwen3-coder', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/llama3.1' => ['ai/llama3.1', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/llama3.2' => ['ai/llama3.2', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/llama3.3' => ['ai/llama3.3', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/mistral' => ['ai/mistral', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/mistral-nemo' => ['ai/mistral-nemo', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/phi4' => ['ai/phi4', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/deepseek-r1-distill-llama' => ['ai/deepseek-r1-distill-llama', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/seed-oss' => ['ai/seed-oss', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/gpt-oss' => ['ai/gpt-oss', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/smollm2' => ['ai/smollm2', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'ai/smollm3' => ['ai/smollm3', Completions::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + + // Embeddings models + yield 'ai/nomic-embed-text-v1.5' => 
['ai/nomic-embed-text-v1.5', Embeddings::class, [Capability::INPUT_TEXT]]; + yield 'ai/mxbai-embed-large' => ['ai/mxbai-embed-large', Embeddings::class, [Capability::INPUT_TEXT]]; + yield 'ai/embeddinggemma' => ['ai/embeddinggemma', Embeddings::class, [Capability::INPUT_TEXT]]; + yield 'ai/granite-embedding-multilingual' => ['ai/granite-embedding-multilingual', Embeddings::class, [Capability::INPUT_TEXT]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/ElevenLabs/Contract/ElevenLabsContractTest.php b/src/platform/tests/Bridge/ElevenLabs/Contract/ElevenLabsContractTest.php index 7120dea39..5fd637065 100644 --- a/src/platform/tests/Bridge/ElevenLabs/Contract/ElevenLabsContractTest.php +++ b/src/platform/tests/Bridge/ElevenLabs/Contract/ElevenLabsContractTest.php @@ -24,7 +24,7 @@ public function testItCanCreatePayloadWithAudio() $contract = ElevenLabsContract::create(); - $payload = $contract->createRequestPayload(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2), $audio); + $payload = $contract->createRequestPayload(new ElevenLabs('eleven_multilingual_v2'), $audio); $this->assertSame([ 'type' => 'input_audio', diff --git a/src/platform/tests/Bridge/ElevenLabs/ElevenLabsClientTest.php b/src/platform/tests/Bridge/ElevenLabs/ElevenLabsClientTest.php index 21941544e..5eefeec5e 100644 --- a/src/platform/tests/Bridge/ElevenLabs/ElevenLabsClientTest.php +++ b/src/platform/tests/Bridge/ElevenLabs/ElevenLabsClientTest.php @@ -15,6 +15,7 @@ use Symfony\AI\Platform\Bridge\ElevenLabs\Contract\AudioNormalizer; use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabs; use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabsClient; +use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Exception\InvalidArgumentException; use Symfony\AI\Platform\Message\Content\Audio; use Symfony\AI\Platform\Model; @@ -32,7 +33,7 @@ public function testSupportsModel() 'my-api-key', ); - $this->assertTrue($client->supports(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2))); + $this->assertTrue($client->supports(new ElevenLabs('eleven_multilingual_v2'))); $this->assertFalse($client->supports(new Model('any-model'))); } @@ -72,7 +73,7 @@ public function testClientCannotPerformSpeechToTextRequestWithInvalidPayload() $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage('The payload must be an array, received "string".'); $this->expectExceptionCode(0); - $client->request(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2), 'foo'); + $client->request(new ElevenLabs('eleven_multilingual_v2'), 'foo'); } public function testClientCanPerformSpeechToTextRequest() @@ -91,7 +92,7 @@ public function testClientCanPerformSpeechToTextRequest() $payload = $normalizer->normalize(Audio::fromFile(\dirname(__DIR__, 5).'/fixtures/audio.mp3')); - $client->request(new ElevenLabs(ElevenLabs::SCRIBE_V1), $payload); + $client->request(new ElevenLabs('scribe_v1', [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, Capability::SPEECH_TO_TEXT]), $payload); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -112,7 +113,7 @@ public function testClientCanPerformSpeechToTextRequestWithExperimentalModel() $payload = $normalizer->normalize(Audio::fromFile(\dirname(__DIR__, 5).'/fixtures/audio.mp3')); - $client->request(new ElevenLabs(ElevenLabs::SCRIBE_V1_EXPERIMENTAL), $payload); + $client->request(new ElevenLabs('scribe_v1_experimental', [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, 
Capability::SPEECH_TO_TEXT]), $payload); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -122,7 +123,7 @@ public function testClientCannotPerformTextToSpeechRequestWithoutValidPayload() $mockHttpClient = new MockHttpClient([ new JsonMockResponse([ [ - 'model_id' => ElevenLabs::ELEVEN_MULTILINGUAL_V2, + 'model_id' => 'eleven_multilingual_v2', 'can_do_text_to_speech' => true, ], ]), @@ -137,7 +138,7 @@ public function testClientCannotPerformTextToSpeechRequestWithoutValidPayload() $this->expectException(InvalidArgumentException::class); $this->expectExceptionMessage('The payload must contain a "text" key'); $this->expectExceptionCode(0); - $client->request(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2, [ + $client->request(new ElevenLabs('eleven_multilingual_v2', options: [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', ]), []); } @@ -149,7 +150,7 @@ public function testClientCanPerformTextToSpeechRequest() $httpClient = new MockHttpClient([ new JsonMockResponse([ [ - 'model_id' => ElevenLabs::ELEVEN_MULTILINGUAL_V2, + 'model_id' => 'eleven_multilingual_v2', 'can_do_text_to_speech' => true, ], ]), @@ -161,7 +162,7 @@ public function testClientCanPerformTextToSpeechRequest() 'my-api-key', ); - $client->request(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2, [ + $client->request(new ElevenLabs('eleven_multilingual_v2', options: [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', ]), [ 'text' => 'foo', @@ -177,7 +178,7 @@ public function testClientCanPerformTextToSpeechRequestWhenVoiceKeyIsProvidedAsR $httpClient = new MockHttpClient([ new JsonMockResponse([ [ - 'model_id' => ElevenLabs::ELEVEN_MULTILINGUAL_V2, + 'model_id' => 'eleven_multilingual_v2', 'can_do_text_to_speech' => true, ], ]), @@ -189,7 +190,7 @@ public function testClientCanPerformTextToSpeechRequestWhenVoiceKeyIsProvidedAsR 'my-api-key', ); - $client->request(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2), [ + $client->request(new ElevenLabs('eleven_multilingual_v2'), [ 'text' => 'foo', ], [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', @@ -205,7 +206,7 @@ public function testClientCanPerformTextToSpeechRequestAsStream() $httpClient = new MockHttpClient([ new JsonMockResponse([ [ - 'model_id' => ElevenLabs::ELEVEN_MULTILINGUAL_V2, + 'model_id' => 'eleven_multilingual_v2', 'can_do_text_to_speech' => true, ], ]), @@ -217,7 +218,7 @@ public function testClientCanPerformTextToSpeechRequestAsStream() 'my-api-key', ); - $result = $client->request(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2, [ + $result = $client->request(new ElevenLabs('eleven_multilingual_v2', options: [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', 'stream' => true, ]), [ @@ -235,7 +236,7 @@ public function testClientCanPerformTextToSpeechRequestAsStreamVoiceKeyIsProvide $httpClient = new MockHttpClient([ new JsonMockResponse([ [ - 'model_id' => ElevenLabs::ELEVEN_MULTILINGUAL_V2, + 'model_id' => 'eleven_multilingual_v2', 'can_do_text_to_speech' => true, ], ]), @@ -247,7 +248,7 @@ public function testClientCanPerformTextToSpeechRequestAsStreamVoiceKeyIsProvide 'my-api-key', ); - $result = $client->request(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2), [ + $result = $client->request(new ElevenLabs('eleven_multilingual_v2'), [ 'text' => 'foo', ], [ 'voice' => 'Dslrhjl3ZpzrctukrQSN', diff --git a/src/platform/tests/Bridge/ElevenLabs/ElevenLabsConverterTest.php b/src/platform/tests/Bridge/ElevenLabs/ElevenLabsConverterTest.php index 31560ea02..7ba865372 100644 --- a/src/platform/tests/Bridge/ElevenLabs/ElevenLabsConverterTest.php +++ 
b/src/platform/tests/Bridge/ElevenLabs/ElevenLabsConverterTest.php @@ -26,7 +26,7 @@ public function testSupportsModel() { $converter = new ElevenLabsResultConverter(new MockHttpClient()); - $this->assertTrue($converter->supports(new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2))); + $this->assertTrue($converter->supports(new ElevenLabs('eleven_multilingual_v2'))); $this->assertFalse($converter->supports(new Model('any-model'))); } diff --git a/src/platform/tests/Bridge/ElevenLabs/ElevenLabsTest.php b/src/platform/tests/Bridge/ElevenLabs/ElevenLabsTest.php deleted file mode 100644 index e719c4bd2..000000000 --- a/src/platform/tests/Bridge/ElevenLabs/ElevenLabsTest.php +++ /dev/null @@ -1,135 +0,0 @@ - - * - * For the full copyright and license information, please view the LICENSE - * file that was distributed with this source code. - */ - -namespace Symfony\AI\Platform\Tests\Bridge\ElevenLabs; - -use PHPUnit\Framework\Attributes\DataProvider; -use PHPUnit\Framework\TestCase; -use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabs; -use Symfony\AI\Platform\Capability; - -final class ElevenLabsTest extends TestCase -{ - public function testSpeechToTextModelHasCorrectCapabilities() - { - $model = new ElevenLabs(ElevenLabs::SCRIBE_V1); - - $this->assertTrue($model->supports(Capability::INPUT_AUDIO)); - $this->assertTrue($model->supports(Capability::OUTPUT_TEXT)); - $this->assertTrue($model->supports(Capability::SPEECH_TO_TEXT)); - $this->assertFalse($model->supports(Capability::INPUT_TEXT)); - $this->assertFalse($model->supports(Capability::OUTPUT_AUDIO)); - $this->assertFalse($model->supports(Capability::TEXT_TO_SPEECH)); - } - - public function testSpeechToTextExperimentalModelHasCorrectCapabilities() - { - $model = new ElevenLabs(ElevenLabs::SCRIBE_V1_EXPERIMENTAL); - - $this->assertTrue($model->supports(Capability::INPUT_AUDIO)); - $this->assertTrue($model->supports(Capability::OUTPUT_TEXT)); - $this->assertTrue($model->supports(Capability::SPEECH_TO_TEXT)); - $this->assertFalse($model->supports(Capability::INPUT_TEXT)); - $this->assertFalse($model->supports(Capability::OUTPUT_AUDIO)); - $this->assertFalse($model->supports(Capability::TEXT_TO_SPEECH)); - } - - public function testTextToSpeechModelHasCorrectCapabilities() - { - $model = new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2); - - $this->assertTrue($model->supports(Capability::INPUT_TEXT)); - $this->assertTrue($model->supports(Capability::OUTPUT_AUDIO)); - $this->assertTrue($model->supports(Capability::TEXT_TO_SPEECH)); - $this->assertFalse($model->supports(Capability::INPUT_AUDIO)); - $this->assertFalse($model->supports(Capability::OUTPUT_TEXT)); - $this->assertFalse($model->supports(Capability::SPEECH_TO_TEXT)); - } - - public function testGetCapabilitiesReturnsSpeechToTextCapabilities() - { - $model = new ElevenLabs(ElevenLabs::SCRIBE_V1); - - $capabilities = $model->getCapabilities(); - - $this->assertCount(3, $capabilities); - $this->assertContains(Capability::INPUT_AUDIO, $capabilities); - $this->assertContains(Capability::OUTPUT_TEXT, $capabilities); - $this->assertContains(Capability::SPEECH_TO_TEXT, $capabilities); - } - - public function testGetCapabilitiesReturnsTextToSpeechCapabilities() - { - $model = new ElevenLabs(ElevenLabs::ELEVEN_V3); - - $capabilities = $model->getCapabilities(); - - $this->assertCount(3, $capabilities); - $this->assertContains(Capability::INPUT_TEXT, $capabilities); - $this->assertContains(Capability::OUTPUT_AUDIO, $capabilities); - $this->assertContains(Capability::TEXT_TO_SPEECH, 
$capabilities); - } - - public function testModelNameIsCorrectlySet() - { - $model = new ElevenLabs(ElevenLabs::SCRIBE_V1); - - $this->assertSame(ElevenLabs::SCRIBE_V1, $model->getName()); - } - - public function testModelOptionsAreCorrectlySet() - { - $options = ['voice' => 'test-voice', 'speed' => 1.2]; - $model = new ElevenLabs(ElevenLabs::ELEVEN_MULTILINGUAL_V2, $options); - - $this->assertSame($options, $model->getOptions()); - } - - #[DataProvider('speechToTextModelProvider')] - public function testAllSpeechToTextModelsHaveCorrectCapabilities(string $modelName) - { - $model = new ElevenLabs($modelName); - - $this->assertTrue($model->supports(Capability::SPEECH_TO_TEXT)); - $this->assertTrue($model->supports(Capability::INPUT_AUDIO)); - $this->assertTrue($model->supports(Capability::OUTPUT_TEXT)); - } - - #[DataProvider('textToSpeechModelProvider')] - public function testAllTextToSpeechModelsHaveCorrectCapabilities(string $modelName) - { - $model = new ElevenLabs($modelName); - - $this->assertTrue($model->supports(Capability::TEXT_TO_SPEECH)); - $this->assertTrue($model->supports(Capability::INPUT_TEXT)); - $this->assertTrue($model->supports(Capability::OUTPUT_AUDIO)); - } - - public static function speechToTextModelProvider(): iterable - { - yield [ElevenLabs::SCRIBE_V1]; - yield [ElevenLabs::SCRIBE_V1_EXPERIMENTAL]; - } - - public static function textToSpeechModelProvider(): iterable - { - yield [ElevenLabs::ELEVEN_V3]; - yield [ElevenLabs::ELEVEN_TTV_V3]; - yield [ElevenLabs::ELEVEN_MULTILINGUAL_V2]; - yield [ElevenLabs::ELEVEN_FLASH_V250]; - yield [ElevenLabs::ELEVEN_FLASH_V2]; - yield [ElevenLabs::ELEVEN_TURBO_V2_5]; - yield [ElevenLabs::ELEVEN_TURBO_v2]; - yield [ElevenLabs::ELEVEN_MULTILINGUAL_STS_V2]; - yield [ElevenLabs::ELEVEN_MULTILINGUAL_ttv_V2]; - yield [ElevenLabs::ELEVEN_ENGLISH_STS_V2]; - } -} diff --git a/src/platform/tests/Bridge/ElevenLabs/ModelCatalogTest.php b/src/platform/tests/Bridge/ElevenLabs/ModelCatalogTest.php new file mode 100644 index 000000000..35bc30830 --- /dev/null +++ b/src/platform/tests/Bridge/ElevenLabs/ModelCatalogTest.php @@ -0,0 +1,45 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\ElevenLabs; + +use Symfony\AI\Platform\Bridge\ElevenLabs\ElevenLabs; +use Symfony\AI\Platform\Bridge\ElevenLabs\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'eleven_v3' => ['eleven_v3', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_ttv_v3' => ['eleven_ttv_v3', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_multilingual_v2' => ['eleven_multilingual_v2', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_flash_v2_5' => ['eleven_flash_v2_5', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_flashv2' => ['eleven_flashv2', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_turbo_v2_5' => ['eleven_turbo_v2_5', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_turbo_v2' => ['eleven_turbo_v2', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_multilingual_sts_v2' => ['eleven_multilingual_sts_v2', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_multilingual_ttv_v2' => ['eleven_multilingual_ttv_v2', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'eleven_english_sts_v2' => ['eleven_english_sts_v2', ElevenLabs::class, [Capability::INPUT_TEXT, Capability::OUTPUT_AUDIO, Capability::TEXT_TO_SPEECH]]; + yield 'scribe_v1' => ['scribe_v1', ElevenLabs::class, [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, Capability::SPEECH_TO_TEXT]]; + yield 'scribe_v1_experimental' => ['scribe_v1_experimental', ElevenLabs::class, [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT, Capability::SPEECH_TO_TEXT]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Gemini/Contract/AssistantMessageNormalizerTest.php b/src/platform/tests/Bridge/Gemini/Contract/AssistantMessageNormalizerTest.php index f31239da6..ccf964a83 100644 --- a/src/platform/tests/Bridge/Gemini/Contract/AssistantMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/Gemini/Contract/AssistantMessageNormalizerTest.php @@ -26,7 +26,7 @@ public function testSupportsNormalization() $normalizer = new AssistantMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new AssistantMessage('Hello'), context: [ - Contract::CONTEXT_MODEL => new Gemini(Gemini::GEMINI_2_PRO), + Contract::CONTEXT_MODEL => new Gemini('gemini-2.0-flash'), ])); $this->assertFalse($normalizer->supportsNormalization('not an assistant message')); } diff --git a/src/platform/tests/Bridge/Gemini/Contract/MessageBagNormalizerTest.php b/src/platform/tests/Bridge/Gemini/Contract/MessageBagNormalizerTest.php index 4b38310be..ee2e3f27b 100644 --- a/src/platform/tests/Bridge/Gemini/Contract/MessageBagNormalizerTest.php +++ 
b/src/platform/tests/Bridge/Gemini/Contract/MessageBagNormalizerTest.php @@ -32,7 +32,7 @@ public function testSupportsNormalization() $normalizer = new MessageBagNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new MessageBag(), context: [ - Contract::CONTEXT_MODEL => new Gemini(Gemini::GEMINI_2_PRO), + Contract::CONTEXT_MODEL => new Gemini('gemini-2.0-flash'), ])); $this->assertFalse($normalizer->supportsNormalization('not a message bag')); } diff --git a/src/platform/tests/Bridge/Gemini/Contract/ToolCallMessageNormalizerTest.php b/src/platform/tests/Bridge/Gemini/Contract/ToolCallMessageNormalizerTest.php index 3ee0f7b9e..260483840 100644 --- a/src/platform/tests/Bridge/Gemini/Contract/ToolCallMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/Gemini/Contract/ToolCallMessageNormalizerTest.php @@ -26,7 +26,7 @@ public function testSupportsNormalization() $normalizer = new ToolCallMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new ToolCallMessage(new ToolCall('', '', []), ''), context: [ - Contract::CONTEXT_MODEL => new Gemini(Gemini::GEMINI_2_PRO), + Contract::CONTEXT_MODEL => new Gemini('gemini-2.0-flash'), ])); $this->assertFalse($normalizer->supportsNormalization('not a tool call')); } diff --git a/src/platform/tests/Bridge/Gemini/Contract/ToolNormalizerTest.php b/src/platform/tests/Bridge/Gemini/Contract/ToolNormalizerTest.php index a80cc6b1e..e59e84ead 100644 --- a/src/platform/tests/Bridge/Gemini/Contract/ToolNormalizerTest.php +++ b/src/platform/tests/Bridge/Gemini/Contract/ToolNormalizerTest.php @@ -28,7 +28,7 @@ public function testSupportsNormalization() $normalizer = new ToolNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new Tool(new ExecutionReference(ToolNoParams::class), 'test', 'test'), context: [ - Contract::CONTEXT_MODEL => new Gemini(Gemini::GEMINI_2_PRO), + Contract::CONTEXT_MODEL => new Gemini('gemini-2.0-flash'), ])); $this->assertFalse($normalizer->supportsNormalization('not a tool')); } diff --git a/src/platform/tests/Bridge/Gemini/Contract/UserMessageNormalizerTest.php b/src/platform/tests/Bridge/Gemini/Contract/UserMessageNormalizerTest.php index 905adfa4b..17261a509 100644 --- a/src/platform/tests/Bridge/Gemini/Contract/UserMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/Gemini/Contract/UserMessageNormalizerTest.php @@ -30,7 +30,7 @@ public function testSupportsNormalization() $normalizer = new UserMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new UserMessage(new Text('Hello')), context: [ - Contract::CONTEXT_MODEL => new Gemini(Gemini::GEMINI_2_PRO), + Contract::CONTEXT_MODEL => new Gemini('gemini-2.0-flash'), ])); $this->assertFalse($normalizer->supportsNormalization('not a user message')); } diff --git a/src/platform/tests/Bridge/Gemini/Embeddings/ModelClientTest.php b/src/platform/tests/Bridge/Gemini/Embeddings/ModelClientTest.php index 05220f04f..bc61f2f6a 100644 --- a/src/platform/tests/Bridge/Gemini/Embeddings/ModelClientTest.php +++ b/src/platform/tests/Bridge/Gemini/Embeddings/ModelClientTest.php @@ -54,7 +54,7 @@ public function testItMakesARequestWithCorrectPayload() ) ->willReturn($result); - $model = new Embeddings(Embeddings::GEMINI_EMBEDDING_EXP_03_07, ['dimensions' => 1536, 'task_type' => 'CLASSIFICATION']); + $model = new Embeddings('gemini-embedding-exp-03-07', options: ['dimensions' => 1536, 'task_type' => 'CLASSIFICATION']); $result = (new ModelClient($httpClient, 'test'))->request($model, ['payload1', 'payload2']); 
$this->assertSame(json_decode($this->getEmbeddingStub(), true), $result->getData()); diff --git a/src/platform/tests/Bridge/Gemini/ModelCatalogTest.php b/src/platform/tests/Bridge/Gemini/ModelCatalogTest.php new file mode 100644 index 000000000..430df07bb --- /dev/null +++ b/src/platform/tests/Bridge/Gemini/ModelCatalogTest.php @@ -0,0 +1,45 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\Gemini; + +use Symfony\AI\Platform\Bridge\Gemini\Embeddings; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\Gemini\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'gemini-2.5-flash' => ['gemini-2.5-flash', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.5-pro' => ['gemini-2.5-pro', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.5-flash-lite' => ['gemini-2.5-flash-lite', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.0-flash' => ['gemini-2.0-flash', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.0-pro-exp-02-05' => ['gemini-2.0-pro-exp-02-05', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.0-flash-lite-preview-02-05' => ['gemini-2.0-flash-lite-preview-02-05', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.0-flash-thinking-exp-01-21' => ['gemini-2.0-flash-thinking-exp-01-21', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-1.5-flash' => ['gemini-1.5-flash', Gemini::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-embedding-exp-03-07' => ['gemini-embedding-exp-03-07', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'text-embedding-004' => ['text-embedding-004', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + yield 'embedding-001' => ['embedding-001', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + 
return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Mistral/Contract/DocumentNormalizerTest.php b/src/platform/tests/Bridge/Mistral/Contract/DocumentNormalizerTest.php index a8393803f..a5cfc9dc7 100644 --- a/src/platform/tests/Bridge/Mistral/Contract/DocumentNormalizerTest.php +++ b/src/platform/tests/Bridge/Mistral/Contract/DocumentNormalizerTest.php @@ -25,7 +25,7 @@ public function testSupportsNormalization() $normalizer = new DocumentNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new Document('some content', 'application/pdf'), context: [ - Contract::CONTEXT_MODEL => new Mistral(Mistral::MISTRAL_LARGE), + Contract::CONTEXT_MODEL => new Mistral('mistral-large-latest'), ])); $this->assertFalse($normalizer->supportsNormalization('not a document')); } diff --git a/src/platform/tests/Bridge/Mistral/Contract/DocumentUrlNormalizerTest.php b/src/platform/tests/Bridge/Mistral/Contract/DocumentUrlNormalizerTest.php index 72254bd04..573e24091 100644 --- a/src/platform/tests/Bridge/Mistral/Contract/DocumentUrlNormalizerTest.php +++ b/src/platform/tests/Bridge/Mistral/Contract/DocumentUrlNormalizerTest.php @@ -25,7 +25,7 @@ public function testSupportsNormalization() $normalizer = new DocumentUrlNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new DocumentUrl('https://example.com/document.pdf'), context: [ - Contract::CONTEXT_MODEL => new Mistral(Mistral::MISTRAL_LARGE), + Contract::CONTEXT_MODEL => new Mistral('mistral-large-latest'), ])); $this->assertFalse($normalizer->supportsNormalization('not a document url')); } diff --git a/src/platform/tests/Bridge/Mistral/ModelCatalogTest.php b/src/platform/tests/Bridge/Mistral/ModelCatalogTest.php new file mode 100644 index 000000000..cfdec2d0f --- /dev/null +++ b/src/platform/tests/Bridge/Mistral/ModelCatalogTest.php @@ -0,0 +1,47 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\Mistral; + +use Symfony\AI\Platform\Bridge\Mistral\Embeddings; +use Symfony\AI\Platform\Bridge\Mistral\Mistral; +use Symfony\AI\Platform\Bridge\Mistral\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'codestral-latest' => ['codestral-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'mistral-large-latest' => ['mistral-large-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'mistral-medium-latest' => ['mistral-medium-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE, Capability::TOOL_CALLING]]; + yield 'mistral-small-latest' => ['mistral-small-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE, Capability::TOOL_CALLING]]; + yield 'open-mistral-nemo' => ['open-mistral-nemo', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'mistral-saba-latest' => ['mistral-saba-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED]]; + yield 'ministral-3b-latest' => ['ministral-3b-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'ministral-8b-latest' => ['ministral-8b-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'pixstral-large-latest' => ['pixstral-large-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE, Capability::TOOL_CALLING]]; + yield 'pixstral-12b-latest' => ['pixstral-12b-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE, Capability::TOOL_CALLING]]; + yield 'voxtral-small-latest' => ['voxtral-small-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_AUDIO, Capability::TOOL_CALLING]]; + yield 'voxtral-mini-latest' => ['voxtral-mini-latest', Mistral::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_AUDIO, Capability::TOOL_CALLING]]; + yield 'mistral-embed' => ['mistral-embed', Embeddings::class, [Capability::INPUT_MULTIPLE]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Ollama/Contract/AssistantMessageNormalizerTest.php 
b/src/platform/tests/Bridge/Ollama/Contract/AssistantMessageNormalizerTest.php
index db26c0b64..6f9ee9eb3 100644
--- a/src/platform/tests/Bridge/Ollama/Contract/AssistantMessageNormalizerTest.php
+++ b/src/platform/tests/Bridge/Ollama/Contract/AssistantMessageNormalizerTest.php
@@ -33,7 +33,7 @@ protected function setUp(): void
     public function testSupportsNormalization()
     {
         $this->assertTrue($this->normalizer->supportsNormalization(new AssistantMessage('Hello'), context: [
-            Contract::CONTEXT_MODEL => new Ollama(Ollama::LLAMA_3_2),
+            Contract::CONTEXT_MODEL => new Ollama('llama3.2'),
         ]));
         $this->assertFalse($this->normalizer->supportsNormalization(new AssistantMessage('Hello'), context: [
             Contract::CONTEXT_MODEL => new Model('any-model'),
diff --git a/src/platform/tests/Bridge/Ollama/ModelCatalogTest.php b/src/platform/tests/Bridge/Ollama/ModelCatalogTest.php
new file mode 100644
index 000000000..9e1c3fad5
--- /dev/null
+++ b/src/platform/tests/Bridge/Ollama/ModelCatalogTest.php
@@ -0,0 +1,54 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Tests\Bridge\Ollama;
+
+use Symfony\AI\Platform\Bridge\Ollama\ModelCatalog;
+use Symfony\AI\Platform\Bridge\Ollama\Ollama;
+use Symfony\AI\Platform\Capability;
+use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface;
+use Symfony\AI\Platform\Tests\ModelCatalogTestCase;
+
+/**
+ * @author Oskar Stark
+ */
+final class ModelCatalogTest extends ModelCatalogTestCase
+{
+    public static function modelsProvider(): iterable
+    {
+        yield 'deepseek-r1' => ['deepseek-r1', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'llama3.1' => ['llama3.1', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'llama3.2' => ['llama3.2', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'llama3' => ['llama3', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'mistral' => ['mistral', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'qwen3' => ['qwen3', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'qwen' => ['qwen', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'qwen2' => ['qwen2', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'qwen2.5' => ['qwen2.5', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'qwen2.5-coder' => ['qwen2.5-coder', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]];
+        yield 'gemma3n' => ['gemma3n', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'gemma3' => ['gemma3', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'qwen2.5vl' => ['qwen2.5vl', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'llava' => ['llava', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'phi3' => ['phi3', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'gemma2' => ['gemma2', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'gemma' => ['gemma', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'llama2' => ['llama2', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED]];
+        yield 'nomic-embed-text' => ['nomic-embed-text', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::INPUT_MULTIPLE]];
+        yield 'bge-m3' => ['bge-m3', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::INPUT_MULTIPLE]];
+        yield 'all-minilm' => ['all-minilm', Ollama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STRUCTURED, Capability::INPUT_MULTIPLE]];
+    }
+
+    protected function createModelCatalog(): ModelCatalogInterface
+    {
+        return new ModelCatalog();
+    }
+}
diff --git a/src/platform/tests/Bridge/Ollama/OllamaClientTest.php b/src/platform/tests/Bridge/Ollama/OllamaClientTest.php
index 8454201d8..83e252259 100644
--- a/src/platform/tests/Bridge/Ollama/OllamaClientTest.php
+++ b/src/platform/tests/Bridge/Ollama/OllamaClientTest.php
@@ -29,7 +29,7 @@ public function testSupportsModel()
     {
         $client = new OllamaClient(new MockHttpClient(), 'http://localhost:1234');
 
-        $this->assertTrue($client->supports(new Ollama(Ollama::LLAMA_3_2)));
+        $this->assertTrue($client->supports(new Ollama('llama3.2')));
         $this->assertFalse($client->supports(new Model('any-model')));
     }
 
@@ -50,7 +50,7 @@ public function testOutputStructureIsSupported()
         ], 'http://127.0.0.1:1234');
         $client = new OllamaClient($httpClient, 'http://127.0.0.1:1234');
 
-        $response = $client->request(new Ollama(Ollama::LLAMA_3_2), [
+        $response = $client->request(new Ollama('llama3.2'), [
             'messages' => [
                 [
                     'role' => 'user',
@@ -107,7 +107,7 @@ public function testStreamingIsSupported()
         ], 'http://127.0.0.1:1234');
         $platform = PlatformFactory::create('http://127.0.0.1:1234', $httpClient);
 
-        $response = $platform->invoke(new Ollama(Ollama::LLAMA_3_2), [
+        $response = $platform->invoke('llama3.2', [
            'messages' => [
                [
                    'role' => 'user',
diff --git a/src/platform/tests/Bridge/Ollama/OllamaResultConverterTest.php b/src/platform/tests/Bridge/Ollama/OllamaResultConverterTest.php
index 8d7e0f653..4ebe63fb3 100644
--- a/src/platform/tests/Bridge/Ollama/OllamaResultConverterTest.php
+++ b/src/platform/tests/Bridge/Ollama/OllamaResultConverterTest.php
@@ -28,7 +28,7 @@ public function testSupportsLlamaModel()
     {
         $converter = new OllamaResultConverter();
 
-        $this->assertTrue($converter->supports(new Ollama(Ollama::LLAMA_3_2)));
+        $this->assertTrue($converter->supports(new Ollama('llama3.2')));
         $this->assertFalse($converter->supports(new Model('any-model')));
     }
 
diff --git a/src/platform/tests/Bridge/Ollama/OllamaTest.php b/src/platform/tests/Bridge/Ollama/OllamaTest.php
deleted file mode 100644
index ffd61e357..000000000
--- a/src/platform/tests/Bridge/Ollama/OllamaTest.php
+++ /dev/null
@@ -1,104 +0,0 @@
-<?php
-
-/*
- * This file is part of the Symfony package.
- *
- * (c) Fabien Potencier <fabien@symfony.com>
- *
- * For the full copyright and license information, please view the
LICENSE - * file that was distributed with this source code. - */ - -namespace Symfony\AI\Platform\Tests\Bridge\Ollama; - -use PHPUnit\Framework\Attributes\DataProvider; -use PHPUnit\Framework\TestCase; -use Symfony\AI\Platform\Bridge\Ollama\Ollama; -use Symfony\AI\Platform\Capability; - -final class OllamaTest extends TestCase -{ - #[DataProvider('provideModelsWithToolCallingCapability')] - public function testModelsWithToolCallingCapability(string $modelName) - { - $model = new Ollama($modelName); - - $this->assertTrue( - $model->supports(Capability::TOOL_CALLING), - \sprintf('Model "%s" should support tool calling capability', $modelName) - ); - } - - #[DataProvider('provideModelsWithoutToolCallingCapability')] - public function testModelsWithoutToolCallingCapability(string $modelName) - { - $model = new Ollama($modelName); - - $this->assertFalse( - $model->supports(Capability::TOOL_CALLING), - \sprintf('Model "%s" should not support tool calling capability', $modelName) - ); - } - - #[DataProvider('provideModelsWithMultipleInputCapabilities')] - public function testModelsWithMultipleInputCapabilities(string $modelName) - { - $model = new Ollama($modelName); - - $this->assertTrue( - $model->supports(Capability::INPUT_MULTIPLE), - \sprintf('Model "%s" should not support multiple input capabilities', $modelName) - ); - } - - /** - * @return iterable - */ - public static function provideModelsWithToolCallingCapability(): iterable - { - // Models that match the llama3.x pattern - yield 'llama3.1' => [Ollama::LLAMA_3_1]; - yield 'llama3.2' => [Ollama::LLAMA_3_2]; - - // Models that match the qwen pattern - yield 'qwen2' => [Ollama::QWEN_2]; - yield 'qwen2.5' => [Ollama::QWEN_2_5]; - yield 'qwen2.5-coder' => [Ollama::QWEN_2_5_CODER]; - yield 'qwen2.5-coder:32b' => [Ollama::QWEN_2_5_CODER_32B]; - yield 'qwen3' => [Ollama::QWEN_3]; - yield 'qwen3:32b' => [Ollama::QWEN_3_32B]; - - // Models that match the deepseek pattern - yield 'deepseek-r1' => [Ollama::DEEPSEEK_R_1]; - - // Models that match the mistral pattern - yield 'mistral' => [Ollama::MISTRAL]; - } - - /** - * @return iterable - */ - public static function provideModelsWithoutToolCallingCapability(): iterable - { - // Models that don't match any of the tool calling patterns - yield 'llama3' => [Ollama::LLAMA_3]; // No version number - yield 'llama2' => [Ollama::LLAMA_2]; - yield 'gemma' => [Ollama::GEMMA]; - yield 'gemma2' => [Ollama::GEMMA_2]; - yield 'gemma3' => [Ollama::GEMMA_3]; - yield 'gemma3n' => [Ollama::GEMMA_3_N]; - yield 'phi3' => [Ollama::PHI_3]; - yield 'llava' => [Ollama::LLAVA]; - yield 'qwen2.5vl' => [Ollama::QWEN_2_5_VL]; // This has 'vl' suffix which doesn't match the pattern - } - - /** - * @return iterable - */ - public static function provideModelsWithMultipleInputCapabilities(): iterable - { - yield 'nomic-embed-text' => [Ollama::NOMIC_EMBED_TEXT]; - yield 'bge-m3' => [Ollama::BGE_M3]; - yield 'all-minilm' => [Ollama::ALL_MINILM]; - } -} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/DocumentNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/DocumentNormalizerTest.php index 563643b72..afc5cc4f7 100644 --- a/src/platform/tests/Bridge/OpenAi/Contract/DocumentNormalizerTest.php +++ b/src/platform/tests/Bridge/OpenAi/Contract/DocumentNormalizerTest.php @@ -25,7 +25,7 @@ public function testSupportsNormalization() $normalizer = new DocumentNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new Document('some content', 'application/pdf'), context: [ - Contract::CONTEXT_MODEL => new 
Gpt(Gpt::GPT_4O), + Contract::CONTEXT_MODEL => new Gpt('gpt-4o'), ])); $this->assertFalse($normalizer->supportsNormalization('not a document')); } diff --git a/src/platform/tests/Bridge/OpenAi/DallE/ModelClientTest.php b/src/platform/tests/Bridge/OpenAi/DallE/ModelClientTest.php index 920855c81..cd365d9e1 100644 --- a/src/platform/tests/Bridge/OpenAi/DallE/ModelClientTest.php +++ b/src/platform/tests/Bridge/OpenAi/DallE/ModelClientTest.php @@ -55,7 +55,7 @@ public function testItIsSupportingTheCorrectModel() { $modelClient = new ModelClient(new MockHttpClient(), 'sk-api-key'); - $this->assertTrue($modelClient->supports(new DallE(DallE::DALL_E_2))); + $this->assertTrue($modelClient->supports(new DallE('dall-e-2'))); } public function testItIsExecutingTheCorrectRequest() @@ -70,7 +70,7 @@ public function testItIsExecutingTheCorrectRequest() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - $modelClient->request(new DallE(DallE::DALL_E_2), 'foo', ['n' => 1, 'response_format' => 'url']); + $modelClient->request(new DallE('dall-e-2'), 'foo', ['n' => 1, 'response_format' => 'url']); } #[TestWith(['EU', 'https://eu.api.openai.com/v1/images/generations'])] @@ -87,6 +87,6 @@ public function testItUsesCorrectBaseUrl(?string $region, string $expectedUrl) }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key', $region); - $modelClient->request(new DallE(DallE::DALL_E_2), 'foo', ['n' => 1, 'response_format' => 'url']); + $modelClient->request(new DallE('dall-e-2'), 'foo', ['n' => 1, 'response_format' => 'url']); } } diff --git a/src/platform/tests/Bridge/OpenAi/DallETest.php b/src/platform/tests/Bridge/OpenAi/DallETest.php index 54283ea17..01d9738c1 100644 --- a/src/platform/tests/Bridge/OpenAi/DallETest.php +++ b/src/platform/tests/Bridge/OpenAi/DallETest.php @@ -18,17 +18,17 @@ final class DallETest extends TestCase { public function testItCreatesDallEWithDefaultSettings() { - $dallE = new DallE(DallE::DALL_E_2); + $dallE = new DallE('dall-e-2'); - $this->assertSame(DallE::DALL_E_2, $dallE->getName()); + $this->assertSame('dall-e-2', $dallE->getName()); $this->assertSame([], $dallE->getOptions()); } public function testItCreatesDallEWithCustomSettings() { - $dallE = new DallE(DallE::DALL_E_3, ['response_format' => 'base64', 'n' => 2]); + $dallE = new DallE('dall-e-3', options: ['response_format' => 'base64', 'n' => 2]); - $this->assertSame(DallE::DALL_E_3, $dallE->getName()); + $this->assertSame('dall-e-3', $dallE->getName()); $this->assertSame(['response_format' => 'base64', 'n' => 2], $dallE->getOptions()); } } diff --git a/src/platform/tests/Bridge/OpenAi/Embeddings/ModelClientTest.php b/src/platform/tests/Bridge/OpenAi/Embeddings/ModelClientTest.php index 1e05353a6..1f37e77a3 100644 --- a/src/platform/tests/Bridge/OpenAi/Embeddings/ModelClientTest.php +++ b/src/platform/tests/Bridge/OpenAi/Embeddings/ModelClientTest.php @@ -58,7 +58,7 @@ public function testItIsSupportingTheCorrectModel() { $modelClient = new ModelClient(new MockHttpClient(), 'sk-api-key'); - $this->assertTrue($modelClient->supports(new Embeddings(Embeddings::TEXT_3_SMALL))); + $this->assertTrue($modelClient->supports(new Embeddings('text-embedding-3-small'))); } public function testItIsExecutingTheCorrectRequest() @@ -73,7 +73,7 @@ public function testItIsExecutingTheCorrectRequest() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - 
$modelClient->request(new Embeddings(Embeddings::TEXT_3_SMALL), 'test text', []); + $modelClient->request(new Embeddings('text-embedding-3-small'), 'test text', []); } public function testItIsExecutingTheCorrectRequestWithCustomOptions() @@ -88,7 +88,7 @@ public function testItIsExecutingTheCorrectRequestWithCustomOptions() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - $modelClient->request(new Embeddings(Embeddings::TEXT_3_LARGE), 'test text', ['dimensions' => 256]); + $modelClient->request(new Embeddings('text-embedding-3-large'), 'test text', ['dimensions' => 256]); } public function testItIsExecutingTheCorrectRequestWithArrayInput() @@ -103,7 +103,7 @@ public function testItIsExecutingTheCorrectRequestWithArrayInput() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - $modelClient->request(new Embeddings(Embeddings::TEXT_3_SMALL), ['text1', 'text2', 'text3'], []); + $modelClient->request(new Embeddings('text-embedding-3-small'), ['text1', 'text2', 'text3'], []); } #[TestWith(['EU', 'https://eu.api.openai.com/v1/embeddings'])] @@ -120,6 +120,6 @@ public function testItUsesCorrectBaseUrl(?string $region, string $expectedUrl) }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key', $region); - $modelClient->request(new Embeddings(Embeddings::TEXT_3_SMALL), 'test input', []); + $modelClient->request(new Embeddings('text-embedding-3-small'), 'test input', []); } } diff --git a/src/platform/tests/Bridge/OpenAi/EmbeddingsTest.php b/src/platform/tests/Bridge/OpenAi/EmbeddingsTest.php index 11c67f661..3b72d0c86 100644 --- a/src/platform/tests/Bridge/OpenAi/EmbeddingsTest.php +++ b/src/platform/tests/Bridge/OpenAi/EmbeddingsTest.php @@ -21,24 +21,24 @@ final class EmbeddingsTest extends TestCase { public function testItCreatesEmbeddingsWithDefaultSettings() { - $embeddings = new Embeddings(Embeddings::TEXT_3_SMALL); + $embeddings = new Embeddings('text-embedding-3-small'); - $this->assertSame(Embeddings::TEXT_3_SMALL, $embeddings->getName()); + $this->assertSame('text-embedding-3-small', $embeddings->getName()); $this->assertSame([], $embeddings->getOptions()); } public function testItCreatesEmbeddingsWithCustomSettings() { - $embeddings = new Embeddings(Embeddings::TEXT_3_LARGE, ['dimensions' => 256]); + $embeddings = new Embeddings('text-embedding-3-large', options: ['dimensions' => 256]); - $this->assertSame(Embeddings::TEXT_3_LARGE, $embeddings->getName()); + $this->assertSame('text-embedding-3-large', $embeddings->getName()); $this->assertSame(['dimensions' => 256], $embeddings->getOptions()); } public function testItCreatesEmbeddingsWithAdaModel() { - $embeddings = new Embeddings(Embeddings::TEXT_ADA_002); + $embeddings = new Embeddings('text-embedding-ada-002'); - $this->assertSame(Embeddings::TEXT_ADA_002, $embeddings->getName()); + $this->assertSame('text-embedding-ada-002', $embeddings->getName()); } } diff --git a/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php b/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php index 4775f4714..47cf7bfb1 100644 --- a/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php +++ b/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php @@ -75,7 +75,7 @@ public function testItIsSupportingTheCorrectModel() { $modelClient = new ModelClient(new MockHttpClient(), 'sk-api-key'); - $this->assertTrue($modelClient->supports(new Gpt(Gpt::GPT_4O))); + 
$this->assertTrue($modelClient->supports(new Gpt('gpt-4o'))); } public function testItIsExecutingTheCorrectRequest() @@ -90,7 +90,7 @@ public function testItIsExecutingTheCorrectRequest() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - $modelClient->request(new Gpt(Gpt::GPT_4O), ['model' => 'gpt-4o', 'messages' => [['role' => 'user', 'content' => 'test message']]], ['temperature' => 1]); + $modelClient->request(new Gpt('gpt-4o'), ['model' => 'gpt-4o', 'messages' => [['role' => 'user', 'content' => 'test message']]], ['temperature' => 1]); } public function testItIsExecutingTheCorrectRequestWithArrayPayload() @@ -105,7 +105,7 @@ public function testItIsExecutingTheCorrectRequestWithArrayPayload() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - $modelClient->request(new Gpt(Gpt::GPT_4O), ['model' => 'gpt-4o', 'messages' => [['role' => 'user', 'content' => 'Hello']]], ['temperature' => 0.7]); + $modelClient->request(new Gpt('gpt-4o'), ['model' => 'gpt-4o', 'messages' => [['role' => 'user', 'content' => 'Hello']]], ['temperature' => 0.7]); } #[TestWith(['EU', 'https://eu.api.openai.com/v1/chat/completions'])] @@ -122,6 +122,6 @@ public function testItUsesCorrectBaseUrl(?string $region, string $expectedUrl) }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key', $region); - $modelClient->request(new Gpt(Gpt::GPT_4O), ['messages' => []], []); + $modelClient->request(new Gpt('gpt-4o'), ['messages' => []], []); } } diff --git a/src/platform/tests/Bridge/OpenAi/GptTest.php b/src/platform/tests/Bridge/OpenAi/GptTest.php index e172a5181..2073cd93d 100644 --- a/src/platform/tests/Bridge/OpenAi/GptTest.php +++ b/src/platform/tests/Bridge/OpenAi/GptTest.php @@ -21,17 +21,17 @@ final class GptTest extends TestCase { public function testItCreatesGptWithDefaultSettings() { - $gpt = new Gpt(Gpt::GPT_4O); + $gpt = new Gpt('gpt-4o'); - $this->assertSame(Gpt::GPT_4O, $gpt->getName()); + $this->assertSame('gpt-4o', $gpt->getName()); $this->assertSame([], $gpt->getOptions()); } public function testItCreatesGptWithCustomSettings() { - $gpt = new Gpt(Gpt::GPT_4_TURBO, ['temperature' => 0.5, 'max_tokens' => 1000]); + $gpt = new Gpt('gpt-4-turbo', [], ['temperature' => 0.5, 'max_tokens' => 1000]); - $this->assertSame(Gpt::GPT_4_TURBO, $gpt->getName()); + $this->assertSame('gpt-4-turbo', $gpt->getName()); $this->assertSame(['temperature' => 0.5, 'max_tokens' => 1000], $gpt->getOptions()); } } diff --git a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php new file mode 100644 index 000000000..0cd8a73bb --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php @@ -0,0 +1,68 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi; + +use Symfony\AI\Platform\Bridge\OpenAi\DallE; +use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Bridge\OpenAi\ModelCatalog; +use Symfony\AI\Platform\Bridge\OpenAi\Whisper; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + // GPT models + yield 'gpt-3.5-turbo' => ['gpt-3.5-turbo', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-3.5-turbo-instruct' => ['gpt-3.5-turbo-instruct', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4' => ['gpt-4', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4-turbo' => ['gpt-4-turbo', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-mini' => ['gpt-4o-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_AUDIO, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'o1-mini' => ['o1-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'o1-preview' => ['o1-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'o3-mini' => ['o3-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'o3-mini-high' => ['o3-mini-high', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; + yield 'gpt-4.5-preview' => ['gpt-4.5-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.1' => ['gpt-4.1', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.1-mini' => ['gpt-4.1-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.1-nano' => ['gpt-4.1-nano', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, 
Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-5' => ['gpt-5', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-5-chat-latest' => ['gpt-5-chat-latest', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::INPUT_IMAGE]]; + yield 'gpt-5-mini' => ['gpt-5-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-5-nano' => ['gpt-5-nano', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + + // Embedding models + yield 'text-embedding-ada-002' => ['text-embedding-ada-002', Embeddings::class, [Capability::INPUT_TEXT]]; + yield 'text-embedding-3-large' => ['text-embedding-3-large', Embeddings::class, [Capability::INPUT_TEXT]]; + yield 'text-embedding-3-small' => ['text-embedding-3-small', Embeddings::class, [Capability::INPUT_TEXT]]; + + // Whisper models + yield 'whisper-1' => ['whisper-1', Whisper::class, [Capability::INPUT_AUDIO, Capability::OUTPUT_TEXT]]; + + // DALL-E models + yield 'dall-e-2' => ['dall-e-2', DallE::class, [Capability::INPUT_TEXT, Capability::OUTPUT_IMAGE]]; + yield 'dall-e-3' => ['dall-e-3', DallE::class, [Capability::INPUT_TEXT, Capability::OUTPUT_IMAGE]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Whisper/ModelClientTest.php b/src/platform/tests/Bridge/OpenAi/Whisper/ModelClientTest.php index ef14126a6..70065a7e1 100644 --- a/src/platform/tests/Bridge/OpenAi/Whisper/ModelClientTest.php +++ b/src/platform/tests/Bridge/OpenAi/Whisper/ModelClientTest.php @@ -54,7 +54,7 @@ public function testItAcceptsValidApiKey() public function testItSupportsWhisperModel() { $client = new ModelClient(new MockHttpClient(), 'sk-test-key'); - $this->assertTrue($client->supports(new Whisper(Whisper::WHISPER_1))); + $this->assertTrue($client->supports(new Whisper('whisper-1'))); } public function testItUsesTranscriptionEndpointByDefault() @@ -69,7 +69,7 @@ function ($method, $url): MockResponse { ]); $client = new ModelClient($httpClient, 'sk-test-key'); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data']); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data']); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -86,7 +86,7 @@ function ($method, $url): MockResponse { ]); $client = new ModelClient($httpClient, 'sk-test-key'); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data'], ['task' => Task::TRANSCRIPTION]); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data'], ['task' => Task::TRANSCRIPTION]); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -103,7 +103,7 @@ function ($method, $url): MockResponse { ]); $client = new ModelClient($httpClient, 'sk-test-key'); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data'], ['task' => Task::TRANSLATION]); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data'], ['task' => Task::TRANSLATION]); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -123,7 +123,7 
@@ function ($method, $url) use ($expectedUrl): MockResponse { ]); $client = new ModelClient($httpClient, 'sk-test-key', $region); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data']); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data']); $this->assertSame(1, $httpClient->getRequestsCount()); } @@ -143,7 +143,7 @@ function ($method, $url) use ($expectedUrl): MockResponse { ]); $client = new ModelClient($httpClient, 'sk-test-key', $region); - $client->request(new Whisper(Whisper::WHISPER_1), ['file' => 'audio-data'], ['task' => Task::TRANSLATION]); + $client->request(new Whisper('whisper-1'), ['file' => 'audio-data'], ['task' => Task::TRANSLATION]); $this->assertSame(1, $httpClient->getRequestsCount()); } diff --git a/src/platform/tests/Bridge/OpenAi/WhisperTest.php b/src/platform/tests/Bridge/OpenAi/WhisperTest.php index 2a96ff4f4..0a8b79f85 100644 --- a/src/platform/tests/Bridge/OpenAi/WhisperTest.php +++ b/src/platform/tests/Bridge/OpenAi/WhisperTest.php @@ -21,17 +21,17 @@ final class WhisperTest extends TestCase { public function testItCreatesWhisperWithDefaultSettings() { - $whisper = new Whisper(Whisper::WHISPER_1); + $whisper = new Whisper('whisper-1'); - $this->assertSame(Whisper::WHISPER_1, $whisper->getName()); + $this->assertSame('whisper-1', $whisper->getName()); $this->assertSame([], $whisper->getOptions()); } public function testItCreatesWhisperWithCustomSettings() { - $whisper = new Whisper(Whisper::WHISPER_1, ['language' => 'en', 'response_format' => 'json']); + $whisper = new Whisper('whisper-1', options: ['language' => 'en', 'response_format' => 'json']); - $this->assertSame(Whisper::WHISPER_1, $whisper->getName()); + $this->assertSame('whisper-1', $whisper->getName()); $this->assertSame(['language' => 'en', 'response_format' => 'json'], $whisper->getOptions()); } } diff --git a/src/platform/tests/Bridge/Perplexity/Contract/FileUrlNormalizerTest.php b/src/platform/tests/Bridge/Perplexity/Contract/FileUrlNormalizerTest.php index 38f5ac9a0..de8228354 100644 --- a/src/platform/tests/Bridge/Perplexity/Contract/FileUrlNormalizerTest.php +++ b/src/platform/tests/Bridge/Perplexity/Contract/FileUrlNormalizerTest.php @@ -25,7 +25,7 @@ public function testSupportsNormalization() $normalizer = new FileUrlNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new DocumentUrl(\dirname(__DIR__, 6).'/fixtures/not-a-document.pdf'), context: [ - Contract::CONTEXT_MODEL => new Perplexity(Perplexity::SONAR), + Contract::CONTEXT_MODEL => new Perplexity('sonar'), ])); $this->assertFalse($normalizer->supportsNormalization('not a document')); } diff --git a/src/platform/tests/Bridge/Perplexity/ModelCatalogTest.php b/src/platform/tests/Bridge/Perplexity/ModelCatalogTest.php new file mode 100644 index 000000000..a1b479d96 --- /dev/null +++ b/src/platform/tests/Bridge/Perplexity/ModelCatalogTest.php @@ -0,0 +1,38 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\Perplexity; + +use Symfony\AI\Platform\Bridge\Perplexity\ModelCatalog; +use Symfony\AI\Platform\Bridge\Perplexity\Perplexity; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'sonar' => ['sonar', Perplexity::class, [Capability::INPUT_MESSAGES, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE]]; + yield 'sonar-pro' => ['sonar-pro', Perplexity::class, [Capability::INPUT_MESSAGES, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE]]; + yield 'sonar-reasoning' => ['sonar-reasoning', Perplexity::class, [Capability::INPUT_MESSAGES, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE]]; + yield 'sonar-reasoning-pro' => ['sonar-reasoning-pro', Perplexity::class, [Capability::INPUT_MESSAGES, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::INPUT_IMAGE]]; + yield 'sonar-deep-research' => ['sonar-deep-research', Perplexity::class, [Capability::INPUT_MESSAGES, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Perplexity/ModelClientTest.php b/src/platform/tests/Bridge/Perplexity/ModelClientTest.php index d1ed9b2fe..34e286650 100644 --- a/src/platform/tests/Bridge/Perplexity/ModelClientTest.php +++ b/src/platform/tests/Bridge/Perplexity/ModelClientTest.php @@ -75,7 +75,7 @@ public function testItIsSupportingTheCorrectModel() { $modelClient = new ModelClient(new MockHttpClient(), 'pplx-api-key'); - $this->assertTrue($modelClient->supports(new Perplexity(Perplexity::SONAR))); + $this->assertTrue($modelClient->supports(new Perplexity('sonar'))); } public function testItIsExecutingTheCorrectRequest() @@ -90,7 +90,7 @@ public function testItIsExecutingTheCorrectRequest() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'pplx-api-key'); - $modelClient->request(new Perplexity(Perplexity::SONAR), ['model' => 'sonar', 'messages' => [['role' => 'user', 'content' => 'test message']]]); + $modelClient->request(new Perplexity('sonar'), ['model' => 'sonar', 'messages' => [['role' => 'user', 'content' => 'test message']]]); } public function testItIsExecutingTheCorrectRequestWithArrayPayload() @@ -105,6 +105,6 @@ public function testItIsExecutingTheCorrectRequestWithArrayPayload() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'pplx-api-key'); - $modelClient->request(new Perplexity(Perplexity::SONAR), ['model' => 'sonar', 'messages' => [['role' => 'user', 'content' => 'Hello']]]); + $modelClient->request(new Perplexity('sonar'), ['model' => 'sonar', 'messages' => [['role' => 'user', 'content' => 'Hello']]]); } } diff --git a/src/platform/tests/Bridge/Perplexity/PerplexityTest.php b/src/platform/tests/Bridge/Perplexity/PerplexityTest.php index d177b47c6..5c9300861 100644 --- 
a/src/platform/tests/Bridge/Perplexity/PerplexityTest.php +++ b/src/platform/tests/Bridge/Perplexity/PerplexityTest.php @@ -21,17 +21,17 @@ final class PerplexityTest extends TestCase { public function testItCreatesPerplexityWithDefaultSettings() { - $perplexity = new Perplexity(Perplexity::SONAR); + $perplexity = new Perplexity('sonar'); - $this->assertSame(Perplexity::SONAR, $perplexity->getName()); + $this->assertSame('sonar', $perplexity->getName()); $this->assertSame([], $perplexity->getOptions()); } public function testItCreatesPerplexityWithCustomSettings() { - $perplexity = new Perplexity(Perplexity::SONAR_PRO, ['temperature' => 0.5, 'max_tokens' => 1000]); + $perplexity = new Perplexity('sonar-pro', options: ['temperature' => 0.5, 'max_tokens' => 1000]); - $this->assertSame(Perplexity::SONAR_PRO, $perplexity->getName()); + $this->assertSame('sonar-pro', $perplexity->getName()); $this->assertSame(['temperature' => 0.5, 'max_tokens' => 1000], $perplexity->getOptions()); } } diff --git a/src/platform/tests/Bridge/Replicate/ModelCatalogTest.php b/src/platform/tests/Bridge/Replicate/ModelCatalogTest.php new file mode 100644 index 000000000..619434be1 --- /dev/null +++ b/src/platform/tests/Bridge/Replicate/ModelCatalogTest.php @@ -0,0 +1,48 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\Replicate; + +use Symfony\AI\Platform\Bridge\Meta\Llama; +use Symfony\AI\Platform\Bridge\Replicate\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'llama-3.3-70B-Instruct' => ['llama-3.3-70B-Instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-90b-vision-instruct' => ['llama-3.2-90b-vision-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-11b-vision-instruct' => ['llama-3.2-11b-vision-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-3b' => ['llama-3.2-3b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-3b-instruct' => ['llama-3.2-3b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-1b' => ['llama-3.2-1b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.2-1b-instruct' => ['llama-3.2-1b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-405b-instruct' => ['llama-3.1-405b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-70b' => ['llama-3.1-70b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-70b-instruct' => ['llama-3-70b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-8b' => ['llama-3.1-8b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3.1-8b-instruct' => ['llama-3.1-8b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-70b' => ['llama-3-70b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-8b-instruct' => 
['llama-3-8b-instruct', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + yield 'llama-3-8b' => ['llama-3-8b', Llama::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Scaleway/Embeddings/ModelClientTest.php b/src/platform/tests/Bridge/Scaleway/Embeddings/ModelClientTest.php index 0d093c206..6085ab62b 100644 --- a/src/platform/tests/Bridge/Scaleway/Embeddings/ModelClientTest.php +++ b/src/platform/tests/Bridge/Scaleway/Embeddings/ModelClientTest.php @@ -43,7 +43,7 @@ public function testItIsSupportingTheCorrectModel() { $modelClient = new ModelClient(new MockHttpClient(), 'scaleway-api-key'); - $this->assertTrue($modelClient->supports(new Embeddings(Embeddings::BAAI_BGE))); + $this->assertTrue($modelClient->supports(new Embeddings('bge-multilingual-gemma2'))); } public function testItIsExecutingTheCorrectRequest() @@ -58,7 +58,7 @@ public function testItIsExecutingTheCorrectRequest() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'scaleway-api-key'); - $modelClient->request(new Embeddings(Embeddings::BAAI_BGE), 'test text', []); + $modelClient->request(new Embeddings('bge-multilingual-gemma2'), 'test text', []); } public function testItIsExecutingTheCorrectRequestWithCustomOptions() @@ -73,7 +73,7 @@ public function testItIsExecutingTheCorrectRequestWithCustomOptions() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'scaleway-api-key'); - $modelClient->request(new Embeddings(Embeddings::BAAI_BGE), 'test text', ['dimensions' => 256]); + $modelClient->request(new Embeddings('bge-multilingual-gemma2'), 'test text', ['dimensions' => 256]); } public function testItIsExecutingTheCorrectRequestWithArrayInput() @@ -88,6 +88,6 @@ public function testItIsExecutingTheCorrectRequestWithArrayInput() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'scaleway-api-key'); - $modelClient->request(new Embeddings(Embeddings::BAAI_BGE), ['text1', 'text2', 'text3'], []); + $modelClient->request(new Embeddings('bge-multilingual-gemma2'), ['text1', 'text2', 'text3'], []); } } diff --git a/src/platform/tests/Bridge/Scaleway/EmbeddingsTest.php b/src/platform/tests/Bridge/Scaleway/EmbeddingsTest.php index a5cf3bfb1..0f3dd10bc 100644 --- a/src/platform/tests/Bridge/Scaleway/EmbeddingsTest.php +++ b/src/platform/tests/Bridge/Scaleway/EmbeddingsTest.php @@ -21,9 +21,9 @@ final class EmbeddingsTest extends TestCase { public function testItCreatesEmbeddingsWithDefaultSettings() { - $embeddings = new Embeddings(Embeddings::BAAI_BGE); + $embeddings = new Embeddings('bge-multilingual-gemma2'); - $this->assertSame(Embeddings::BAAI_BGE, $embeddings->getName()); + $this->assertSame('bge-multilingual-gemma2', $embeddings->getName()); $this->assertSame([], $embeddings->getOptions()); } } diff --git a/src/platform/tests/Bridge/Scaleway/Llm/ModelClientTest.php b/src/platform/tests/Bridge/Scaleway/Llm/ModelClientTest.php index 94ae24321..fe5b0d8fa 100644 --- a/src/platform/tests/Bridge/Scaleway/Llm/ModelClientTest.php +++ b/src/platform/tests/Bridge/Scaleway/Llm/ModelClientTest.php @@ -51,7 +51,7 @@ public function testItIsSupportingTheCorrectModel() { $modelClient = new ModelClient(new MockHttpClient(), 'sk-api-key'); - $this->assertTrue($modelClient->supports(new Scaleway(Scaleway::DEEPSEEK))); 
+ $this->assertTrue($modelClient->supports(new Scaleway('deepseek-r1-distill-llama-70b'))); } public function testItIsExecutingTheCorrectRequest() @@ -66,7 +66,7 @@ public function testItIsExecutingTheCorrectRequest() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'scaleway-api-key'); - $modelClient->request(new Scaleway(Scaleway::DEEPSEEK), ['model' => 'deepseek-r1-distill-llama-70b', 'messages' => [['role' => 'user', 'content' => 'test message']]], ['temperature' => 1]); + $modelClient->request(new Scaleway('deepseek-r1-distill-llama-70b'), ['model' => 'deepseek-r1-distill-llama-70b', 'messages' => [['role' => 'user', 'content' => 'test message']]], ['temperature' => 1]); } public function testItIsExecutingTheCorrectRequestWithArrayPayload() @@ -81,7 +81,7 @@ public function testItIsExecutingTheCorrectRequestWithArrayPayload() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'scaleway-api-key'); - $modelClient->request(new Scaleway(Scaleway::DEEPSEEK), ['model' => 'deepseek-r1-distill-llama-70b', 'messages' => [['role' => 'user', 'content' => 'Hello']]], ['temperature' => 0.7]); + $modelClient->request(new Scaleway('deepseek-r1-distill-llama-70b'), ['model' => 'deepseek-r1-distill-llama-70b', 'messages' => [['role' => 'user', 'content' => 'Hello']]], ['temperature' => 0.7]); } public function testItUsesCorrectBaseUrl() @@ -95,6 +95,6 @@ public function testItUsesCorrectBaseUrl() }; $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'scaleway-api-key'); - $modelClient->request(new Scaleway(Scaleway::DEEPSEEK), ['messages' => []], []); + $modelClient->request(new Scaleway('deepseek-r1-distill-llama-70b'), ['messages' => []], []); } } diff --git a/src/platform/tests/Bridge/Scaleway/ModelCatalogTest.php b/src/platform/tests/Bridge/Scaleway/ModelCatalogTest.php new file mode 100644 index 000000000..9a23094c4 --- /dev/null +++ b/src/platform/tests/Bridge/Scaleway/ModelCatalogTest.php @@ -0,0 +1,49 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\Scaleway; + +use Symfony\AI\Platform\Bridge\Scaleway\Embeddings; +use Symfony\AI\Platform\Bridge\Scaleway\ModelCatalog; +use Symfony\AI\Platform\Bridge\Scaleway\Scaleway; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + // Scaleway models + yield 'deepseek-r1-distill-llama-70b' => ['deepseek-r1-distill-llama-70b', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'gemma-3-27b-it' => ['gemma-3-27b-it', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'llama-3.1-8b-instruct' => ['llama-3.1-8b-instruct', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'llama-3.3-70b-instruct' => ['llama-3.3-70b-instruct', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'devstral-small-2505' => ['devstral-small-2505', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'mistral-nemo-instruct-2407' => ['mistral-nemo-instruct-2407', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'pixtral-12b-2409' => ['pixtral-12b-2409', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'mistral-small-3.2-24b-instruct-2506' => ['mistral-small-3.2-24b-instruct-2506', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-oss-120b' => ['gpt-oss-120b', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'qwen3-coder-30b-a3b-instruct' => ['qwen3-coder-30b-a3b-instruct', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + yield 'qwen3-235b-a22b-instruct-2507' => ['qwen3-235b-a22b-instruct-2507', Scaleway::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::OUTPUT_STRUCTURED]]; + + // Embedding models + yield 'bge-multilingual-gemma2' => ['bge-multilingual-gemma2', Embeddings::class, [Capability::INPUT_TEXT]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/TransformersPhp/ModelCatalogTest.php b/src/platform/tests/Bridge/TransformersPhp/ModelCatalogTest.php new file mode 100644 index 000000000..9a2ba0f52 --- /dev/null +++ 
b/src/platform/tests/Bridge/TransformersPhp/ModelCatalogTest.php @@ -0,0 +1,38 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\TransformersPhp; + +use Symfony\AI\Platform\Bridge\TransformersPhp\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + // TransformersPhp can use various models from HuggingFace, so we test with example model names + // Since it extends DynamicModelCatalog, all capabilities are provided + yield 'microsoft/DialoGPT-medium' => ['microsoft/DialoGPT-medium', Model::class, Capability::cases()]; + yield 'sentence-transformers/all-MiniLM-L6-v2' => ['sentence-transformers/all-MiniLM-L6-v2', Model::class, Capability::cases()]; + yield 'xenova/text-generation-webui' => ['xenova/text-generation-webui', Model::class, Capability::cases()]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/VertexAi/Contract/AssistantMessageNormalizerTest.php b/src/platform/tests/Bridge/VertexAi/Contract/AssistantMessageNormalizerTest.php index 35c50b90f..0e356fb2a 100644 --- a/src/platform/tests/Bridge/VertexAi/Contract/AssistantMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/VertexAi/Contract/AssistantMessageNormalizerTest.php @@ -26,7 +26,7 @@ public function testSupportsNormalization() $normalizer = new AssistantMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new AssistantMessage('Hello'), context: [ - Contract::CONTEXT_MODEL => new Model(Model::GEMINI_2_5_PRO), + Contract::CONTEXT_MODEL => new Model('gemini-2.5-pro'), ])); $this->assertFalse($normalizer->supportsNormalization('not an assistant message')); } diff --git a/src/platform/tests/Bridge/VertexAi/Contract/MessageBagNormalizerTest.php b/src/platform/tests/Bridge/VertexAi/Contract/MessageBagNormalizerTest.php index a185ebb2a..6292a99b9 100644 --- a/src/platform/tests/Bridge/VertexAi/Contract/MessageBagNormalizerTest.php +++ b/src/platform/tests/Bridge/VertexAi/Contract/MessageBagNormalizerTest.php @@ -32,7 +32,7 @@ public function testSupportsNormalization() $normalizer = new MessageBagNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new MessageBag(), context: [ - Contract::CONTEXT_MODEL => new Model(Model::GEMINI_2_5_PRO), + Contract::CONTEXT_MODEL => new Model('gemini-2.5-pro'), ])); $this->assertFalse($normalizer->supportsNormalization('not a message bag')); } diff --git a/src/platform/tests/Bridge/VertexAi/Contract/ToolCallMessageNormalizerTest.php b/src/platform/tests/Bridge/VertexAi/Contract/ToolCallMessageNormalizerTest.php index d59edb8c1..20ee5ba77 100644 --- a/src/platform/tests/Bridge/VertexAi/Contract/ToolCallMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/VertexAi/Contract/ToolCallMessageNormalizerTest.php @@ -26,7 +26,7 @@ public function testSupportsNormalization() $normalizer = new ToolCallMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new ToolCallMessage(new ToolCall('', '', []), ''), context: [ - Contract::CONTEXT_MODEL => new Model(Model::GEMINI_2_5_PRO), + Contract::CONTEXT_MODEL => new 
Model('gemini-2.5-pro'), ])); $this->assertFalse($normalizer->supportsNormalization('not a tool call')); } diff --git a/src/platform/tests/Bridge/VertexAi/Contract/ToolNormalizerTest.php b/src/platform/tests/Bridge/VertexAi/Contract/ToolNormalizerTest.php index 096990278..53b819a7f 100644 --- a/src/platform/tests/Bridge/VertexAi/Contract/ToolNormalizerTest.php +++ b/src/platform/tests/Bridge/VertexAi/Contract/ToolNormalizerTest.php @@ -28,7 +28,7 @@ public function testSupportsNormalization() $normalizer = new ToolNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new Tool(new ExecutionReference(ToolNoParams::class), 'test', 'test'), context: [ - Contract::CONTEXT_MODEL => new Model(Model::GEMINI_2_5_PRO), + Contract::CONTEXT_MODEL => new Model('gemini-2.5-pro'), ])); $this->assertFalse($normalizer->supportsNormalization('not a tool')); } diff --git a/src/platform/tests/Bridge/VertexAi/Contract/UserMessageNormalizerTest.php b/src/platform/tests/Bridge/VertexAi/Contract/UserMessageNormalizerTest.php index 3ef6616a7..4c46f6858 100644 --- a/src/platform/tests/Bridge/VertexAi/Contract/UserMessageNormalizerTest.php +++ b/src/platform/tests/Bridge/VertexAi/Contract/UserMessageNormalizerTest.php @@ -30,7 +30,7 @@ public function testSupportsNormalization() $normalizer = new UserMessageNormalizer(); $this->assertTrue($normalizer->supportsNormalization(new UserMessage(new Text('Hello')), context: [ - Contract::CONTEXT_MODEL => new Model(Model::GEMINI_2_5_PRO), + Contract::CONTEXT_MODEL => new Model('gemini-2.5-pro'), ])); $this->assertFalse($normalizer->supportsNormalization('not a user message')); } diff --git a/src/platform/tests/Bridge/VertexAi/Embeddings/ModelClientTest.php b/src/platform/tests/Bridge/VertexAi/Embeddings/ModelClientTest.php index 36ea4da48..a78e0c67b 100644 --- a/src/platform/tests/Bridge/VertexAi/Embeddings/ModelClientTest.php +++ b/src/platform/tests/Bridge/VertexAi/Embeddings/ModelClientTest.php @@ -31,7 +31,7 @@ public function testItGeneratesTheEmbeddingSuccessfully() $client = new ModelClient($httpClient, 'global', 'test'); - $model = new Model(Model::GEMINI_EMBEDDING_001, ['outputDimensionality' => 1536, 'task_type' => TaskType::CLASSIFICATION]); + $model = new Model('gemini-embedding-001', options: ['outputDimensionality' => 1536, 'task_type' => TaskType::CLASSIFICATION]); $result = $client->request($model, 'test payload'); diff --git a/src/platform/tests/Bridge/VertexAi/Gemini/ModelClientTest.php b/src/platform/tests/Bridge/VertexAi/Gemini/ModelClientTest.php index bb97bd230..8af8f4b54 100644 --- a/src/platform/tests/Bridge/VertexAi/Gemini/ModelClientTest.php +++ b/src/platform/tests/Bridge/VertexAi/Gemini/ModelClientTest.php @@ -35,7 +35,7 @@ public function testItInvokesTheTextModelsSuccessfully() $client = new ModelClient($httpClient, 'global', 'test'); - $result = $client->request(new Model(Model::GEMINI_2_0_FLASH), $payload); + $result = $client->request(new Model('gemini-2.0-flash'), $payload); $data = $result->getData(); $info = $result->getObject()->getInfo(); @@ -77,6 +77,6 @@ function ($method, $url, $options) { ); $client = new ModelClient($httpClient, 'global', 'test'); - $client->request(new Model(Model::GEMINI_2_0_FLASH), $payload, ['server_tools' => ['google_search' => true]]); + $client->request(new Model('gemini-2.0-flash'), $payload, ['server_tools' => ['google_search' => true]]); } } diff --git a/src/platform/tests/Bridge/VertexAi/ModelCatalogTest.php b/src/platform/tests/Bridge/VertexAi/ModelCatalogTest.php new file mode 100644 index 
000000000..2f0b9eff4 --- /dev/null +++ b/src/platform/tests/Bridge/VertexAi/ModelCatalogTest.php @@ -0,0 +1,45 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\VertexAi; + +use Symfony\AI\Platform\Bridge\VertexAi\Embeddings\Model as EmbeddingsModel; +use Symfony\AI\Platform\Bridge\VertexAi\Gemini\Model as GeminiModel; +use Symfony\AI\Platform\Bridge\VertexAi\ModelCatalog; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +/** + * @author Oskar Stark + */ +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + // Gemini models + yield 'gemini-2.5-pro' => ['gemini-2.5-pro', GeminiModel::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.5-flash' => ['gemini-2.5-flash', GeminiModel::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.0-flash' => ['gemini-2.0-flash', GeminiModel::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.5-flash-lite' => ['gemini-2.5-flash-lite', GeminiModel::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + yield 'gemini-2.0-flash-lite' => ['gemini-2.0-flash-lite', GeminiModel::class, [Capability::INPUT_MESSAGES, Capability::INPUT_IMAGE, Capability::INPUT_AUDIO, Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING]]; + + // Embeddings models + yield 'gemini-embedding-001' => ['gemini-embedding-001', EmbeddingsModel::class, [Capability::INPUT_TEXT, Capability::INPUT_MULTIPLE]]; + yield 'text-embedding-005' => ['text-embedding-005', EmbeddingsModel::class, [Capability::INPUT_TEXT, Capability::INPUT_MULTIPLE]]; + yield 'text-multilingual-embedding-002' => ['text-multilingual-embedding-002', EmbeddingsModel::class, [Capability::INPUT_TEXT, Capability::INPUT_MULTIPLE]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Voyage/ModelCatalogTest.php b/src/platform/tests/Bridge/Voyage/ModelCatalogTest.php new file mode 100644 index 000000000..4f81782d0 --- /dev/null +++ b/src/platform/tests/Bridge/Voyage/ModelCatalogTest.php @@ -0,0 +1,40 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\Voyage; + +use Symfony\AI\Platform\Bridge\Voyage\ModelCatalog; +use Symfony\AI\Platform\Bridge\Voyage\Voyage; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; +use Symfony\AI\Platform\Tests\ModelCatalogTestCase; + +final class ModelCatalogTest extends ModelCatalogTestCase +{ + public static function modelsProvider(): iterable + { + yield 'voyage-3.5' => ['voyage-3.5', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-3.5-lite' => ['voyage-3.5-lite', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-3' => ['voyage-3', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-3-lite' => ['voyage-3-lite', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-3-large' => ['voyage-3-large', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-finance-2' => ['voyage-finance-2', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-multilingual-2' => ['voyage-multilingual-2', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-law-2' => ['voyage-law-2', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-code-3' => ['voyage-code-3', Voyage::class, [Capability::INPUT_MULTIPLE]]; + yield 'voyage-code-2' => ['voyage-code-2', Voyage::class, [Capability::INPUT_MULTIPLE]]; + } + + protected function createModelCatalog(): ModelCatalogInterface + { + return new ModelCatalog(); + } +} diff --git a/src/platform/tests/Bridge/Voyage/ResultConverterTest.php b/src/platform/tests/Bridge/Voyage/ResultConverterTest.php index df5107b2a..88f9da9ec 100644 --- a/src/platform/tests/Bridge/Voyage/ResultConverterTest.php +++ b/src/platform/tests/Bridge/Voyage/ResultConverterTest.php @@ -11,7 +11,6 @@ namespace Symfony\AI\Platform\Tests\Bridge\Voyage; -use PHPUnit\Framework\Attributes\DataProvider; use PHPUnit\Framework\TestCase; use Symfony\AI\Platform\Bridge\Voyage\ResultConverter; use Symfony\AI\Platform\Bridge\Voyage\Voyage; @@ -85,25 +84,10 @@ public function testItThrowsExceptionWhenResponseDoesNotContainData() $converter->convert(new RawHttpResult($result)); } - #[DataProvider('voyageModelsProvider')] - public function testItSupportsVoyageModel(string $modelName) + public function testItSupportsVoyageModel() { $converter = new ResultConverter(); - $this->assertTrue($converter->supports(new Voyage($modelName))); - } - - public static function voyageModelsProvider(): iterable - { - yield 'V3_5' => [Voyage::V3_5]; - yield 'V3_5_LITE' => [Voyage::V3_5_LITE]; - yield 'V3' => [Voyage::V3]; - yield 'V3_LITE' => [Voyage::V3_LITE]; - yield 'V3_LARGE' => [Voyage::V3_LARGE]; - yield 'FINANCE_2' => [Voyage::FINANCE_2]; - yield 'MULTILINGUAL_2' => [Voyage::MULTILINGUAL_2]; - yield 'LAW_2' => [Voyage::LAW_2]; - yield 'CODE_3' => [Voyage::CODE_3]; - yield 'CODE_2' => [Voyage::CODE_2]; + $this->assertTrue($converter->supports(new Voyage('voyage-3-5'))); } } diff --git a/src/platform/tests/Contract/Normalizer/Message/MessageBagNormalizerTest.php b/src/platform/tests/Contract/Normalizer/Message/MessageBagNormalizerTest.php index 8007499bf..7587db501 100644 --- a/src/platform/tests/Contract/Normalizer/Message/MessageBagNormalizerTest.php +++ b/src/platform/tests/Contract/Normalizer/Message/MessageBagNormalizerTest.php @@ -85,7 +85,7 @@ public function testNormalizeWithModel() $innerNormalizer = $this->createMock(NormalizerInterface::class); $innerNormalizer->expects($this->once()) ->method('normalize') - ->with($messages, null, [Contract::CONTEXT_MODEL => new 
Gpt(Gpt::GPT_4O)]) + ->with($messages, null, [Contract::CONTEXT_MODEL => new Gpt('gpt-4o')]) ->willReturn([ ['role' => 'system', 'content' => 'You are a helpful assistant'], ['role' => 'user', 'content' => 'Hello'], @@ -102,7 +102,7 @@ public function testNormalizeWithModel() ]; $this->assertSame($expected, $this->normalizer->normalize($messageBag, context: [ - Contract::CONTEXT_MODEL => new Gpt(Gpt::GPT_4O), + Contract::CONTEXT_MODEL => new Gpt('gpt-4o'), ])); } } diff --git a/src/platform/tests/ContractTest.php b/src/platform/tests/ContractTest.php index f671acd5e..f53d1f083 100644 --- a/src/platform/tests/ContractTest.php +++ b/src/platform/tests/ContractTest.php @@ -52,7 +52,7 @@ public function testCreateRequestPayload(Model $model, array|string|object $inpu public static function providePayloadTestCases(): iterable { yield 'MessageBag with Gpt' => [ - 'model' => new Gpt(Gpt::GPT_4O), + 'model' => new Gpt('gpt-4o'), 'input' => new MessageBag( Message::forSystem('System message'), Message::ofUser('User message'), @@ -70,7 +70,7 @@ public static function providePayloadTestCases(): iterable $audio = Audio::fromFile(\dirname(__DIR__, 3).'/fixtures/audio.mp3'); yield 'Audio within MessageBag with Gpt' => [ - 'model' => new Gpt(Gpt::GPT_4O), + 'model' => new Gpt('gpt-4o'), 'input' => new MessageBag(Message::ofUser('What is this recording about?', $audio)), 'expected' => [ 'messages' => [ @@ -94,7 +94,7 @@ public static function providePayloadTestCases(): iterable $image = Image::fromFile(\dirname(__DIR__, 3).'/fixtures/image.jpg'); yield 'Image within MessageBag with Gpt' => [ - 'model' => new Gpt(Gpt::GPT_4O), + 'model' => new Gpt('gpt-4o'), 'input' => new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), Message::ofUser('Describe the image as a comedian would do it.', $image), @@ -118,7 +118,7 @@ public static function providePayloadTestCases(): iterable ]; yield 'ImageUrl within MessageBag with Gpt' => [ - 'model' => new Gpt(Gpt::GPT_4O), + 'model' => new Gpt('gpt-4o'), 'input' => new MessageBag( Message::forSystem('You are an image analyzer bot that helps identify the content of images.'), Message::ofUser('Describe the image as a comedian would do it.', new ImageUrl('https://example.com/image.jpg')), @@ -142,13 +142,13 @@ public static function providePayloadTestCases(): iterable ]; yield 'Text Input with Embeddings' => [ - 'model' => new Embeddings(Embeddings::TEXT_3_SMALL), + 'model' => new Embeddings('text-embedding-3-small'), 'input' => 'This is a test input.', 'expected' => 'This is a test input.', ]; yield 'Longer Conversation with Gpt' => [ - 'model' => new Gpt(Gpt::GPT_4O), + 'model' => new Gpt('gpt-4o'), 'input' => new MessageBag( Message::forSystem('My amazing system prompt.'), Message::ofAssistant('It is time to sleep.'), @@ -192,7 +192,7 @@ public function jsonSerialize(): array }; yield 'MessageBag with custom message from Gpt' => [ - 'model' => new Gpt(Gpt::GPT_4O), + 'model' => new Gpt('gpt-4o'), 'input' => new MessageBag($customSerializableMessage), 'expected' => [ 'messages' => [ @@ -209,7 +209,7 @@ public function testExtendedContractHandlesWhisper() $audio = Audio::fromFile(\dirname(__DIR__, 3).'/fixtures/audio.mp3'); - $actual = $contract->createRequestPayload(new Whisper(Whisper::WHISPER_1), $audio); + $actual = $contract->createRequestPayload(new Whisper('whisper-1'), $audio); $this->assertArrayHasKey('model', $actual); $this->assertSame('whisper-1', $actual['model']); diff --git 
a/src/platform/tests/DynamicModelCatalogTest.php b/src/platform/tests/DynamicModelCatalogTest.php new file mode 100644 index 000000000..91f6064e9 --- /dev/null +++ b/src/platform/tests/DynamicModelCatalogTest.php @@ -0,0 +1,66 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests; + +use PHPUnit\Framework\Attributes\TestWith; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; + +/** + * @author Oskar Stark + */ +final class DynamicModelCatalogTest extends TestCase +{ + public function testGetModelReturnsModelWithAllCapabilities() + { + $catalog = new DynamicModelCatalog(); + $model = $catalog->getModel('test-model'); + + $this->assertInstanceOf(Model::class, $model); + $this->assertSame('test-model', $model->getName()); + + // Check that all capabilities are present + foreach (Capability::cases() as $capability) { + $this->assertTrue($model->supports($capability), \sprintf('Model should have capability %s', $capability->value)); + } + } + + public function testGetModelWithOptions() + { + $catalog = new DynamicModelCatalog(); + $model = $catalog->getModel('test-model?temperature=0.7&max_tokens=1000'); + + $this->assertInstanceOf(Model::class, $model); + $this->assertSame('test-model', $model->getName()); + + $options = $model->getOptions(); + $this->assertSame('0.7', $options['temperature']); + $this->assertSame('1000', $options['max_tokens']); + } + + #[TestWith(['gpt-4'])] + #[TestWith(['claude-3-opus'])] + #[TestWith(['mistral-large'])] + #[TestWith(['some/random/model:v1.0'])] + #[TestWith(['huggingface/model-name'])] + #[TestWith(['custom-local-model'])] + public function testGetModelAcceptsAnyModelName(string $modelName) + { + $catalog = new DynamicModelCatalog(); + $model = $catalog->getModel($modelName); + + $this->assertInstanceOf(Model::class, $model); + $this->assertSame($modelName, $model->getName()); + } +} diff --git a/src/platform/tests/InMemoryPlatformTest.php b/src/platform/tests/InMemoryPlatformTest.php index 0aeae7691..0ddfadc9f 100644 --- a/src/platform/tests/InMemoryPlatformTest.php +++ b/src/platform/tests/InMemoryPlatformTest.php @@ -20,7 +20,7 @@ class InMemoryPlatformTest extends TestCase public function testPlatformInvokeWithFixedResult() { $platform = new InMemoryPlatform('Mocked result'); - $result = $platform->invoke(new Model('test'), 'input'); + $result = $platform->invoke('test', 'input'); $this->assertSame('Mocked result', $result->asText()); $this->assertSame('Mocked result', $result->getResult()->getContent()); @@ -33,7 +33,7 @@ public function testPlatformInvokeWithCallableResult() return strtoupper((string) $input); }); - $result = $platform->invoke(new Model('test'), 'dynamic text'); + $result = $platform->invoke('test', 'dynamic text'); $this->assertSame('DYNAMIC TEXT', $result->asText()); } @@ -44,7 +44,7 @@ public function testPlatformInvokeWithVectorResultResponse() fn () => new VectorResult(new Vector([0.1, 0.1, 0.5])) ); - $result = $platform->invoke(new Model('test'), 'dynamic text'); + $result = $platform->invoke('test', 'dynamic text'); $this->assertEquals([0.1, 0.1, 0.5], $result->asVectors()[0]->getData()); } diff --git a/src/store/src/Document/Vectorizer.php b/src/store/src/Document/Vectorizer.php index c393913c7..8e10ef3e5 100644 --- a/src/store/src/Document/Vectorizer.php +++ 
b/src/store/src/Document/Vectorizer.php @@ -14,7 +14,6 @@ use Psr\Log\LoggerInterface; use Psr\Log\NullLogger; use Symfony\AI\Platform\Capability; -use Symfony\AI\Platform\Model; use Symfony\AI\Platform\PlatformInterface; use Symfony\AI\Platform\Vector\Vector; use Symfony\AI\Store\Exception\RuntimeException; @@ -23,7 +22,7 @@ { public function __construct( private PlatformInterface $platform, - private Model $model, + private string $model, private LoggerInterface $logger = new NullLogger(), ) { } @@ -33,7 +32,7 @@ public function vectorizeTextDocuments(array $documents, array $options = []): a $documentCount = \count($documents); $this->logger->info('Starting vectorization process', ['document_count' => $documentCount]); - if ($this->model->supports(Capability::INPUT_MULTIPLE)) { + if ($this->platform->getModelCatalog()->getModel($this->model)->supports(Capability::INPUT_MULTIPLE)) { $this->logger->debug('Using batch vectorization with model that supports multiple inputs'); $result = $this->platform->invoke($this->model, array_map(fn (TextDocument $document) => $document->content, $documents), $options); diff --git a/src/store/tests/Document/VectorizerTest.php b/src/store/tests/Document/VectorizerTest.php index 30e1740eb..67596ab22 100644 --- a/src/store/tests/Document/VectorizerTest.php +++ b/src/store/tests/Document/VectorizerTest.php @@ -17,15 +17,12 @@ use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Model; -use Symfony\AI\Platform\ModelClientInterface; -use Symfony\AI\Platform\Platform; +use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog; +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; use Symfony\AI\Platform\PlatformInterface; -use Symfony\AI\Platform\Result\RawHttpResult; use Symfony\AI\Platform\Result\RawResultInterface; -use Symfony\AI\Platform\Result\ResultInterface; use Symfony\AI\Platform\Result\ResultPromise; use Symfony\AI\Platform\Result\VectorResult; -use Symfony\AI\Platform\ResultConverterInterface; use Symfony\AI\Platform\Vector\Vector; use Symfony\AI\Store\Document\Metadata; use Symfony\AI\Store\Document\TextDocument; @@ -33,7 +30,6 @@ use Symfony\AI\Store\Document\Vectorizer; use Symfony\AI\Store\Exception\RuntimeException; use Symfony\AI\Store\Tests\Double\PlatformTestHandler; -use Symfony\Component\HttpClient\Response\MockResponse; use Symfony\Component\Uid\Uuid; #[TestDox('Tests for the Vectorizer class')] @@ -53,11 +49,22 @@ public function testVectorizeDocumentsWithBatchSupport() new Vector([0.7, 0.8, 0.9]), ]; - $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors)); + // Create a test model catalog WITH INPUT_MULTIPLE capability + $modelCatalog = new class extends AbstractModelCatalog { + protected array $models = [ + 'test-embedding-with-batch' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + Capability::INPUT_MULTIPLE, // Explicitly including batch support + ], + ], + ]; + }; - $model = new Embeddings(Embeddings::TEXT_3_SMALL); + $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors), $modelCatalog); - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'test-embedding-with-batch'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount(3, $vectorDocuments); @@ -76,9 +83,7 @@ public function testVectorizeDocumentsWithSingleDocument() $vector = new Vector([0.1, 0.2, 0.3]); $platform = PlatformTestHandler::createPlatform(new 
VectorResult($vector)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments([$document]); $this->assertCount(1, $vectorDocuments); @@ -91,9 +96,7 @@ public function testVectorizeDocumentsWithSingleDocument() public function testVectorizeEmptyDocumentsArray() { $platform = PlatformTestHandler::createPlatform(new VectorResult()); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments([]); $this->assertSame([], $vectorDocuments); @@ -115,9 +118,7 @@ public function testVectorizeDocumentsPreservesMetadata() ]; $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount(2, $vectorDocuments); @@ -146,9 +147,7 @@ public function testVectorizeDocumentsPreservesDocumentIds() ]; $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount(3, $vectorDocuments); @@ -175,9 +174,7 @@ public function testVectorizeVariousDocumentCounts(int $count) $platform = PlatformTestHandler::createPlatform( $count > 0 ? 
new VectorResult(...$vectors) : new VectorResult() ); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount($count, $vectorDocuments); @@ -214,9 +211,7 @@ public function testVectorizeDocumentsWithLargeVectors() $vector = new Vector($dimensions); $platform = PlatformTestHandler::createPlatform(new VectorResult($vector)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments([$document]); $this->assertCount(1, $vectorDocuments); @@ -238,9 +233,7 @@ public function testVectorizeDocumentsWithSpecialCharacters() ]; $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount(3, $vectorDocuments); @@ -253,57 +246,32 @@ public function testVectorizeDocumentsWithSpecialCharacters() public function testVectorizeDocumentsWithoutBatchSupportUsesNonBatchMode() { - // Test with a model that doesn't support batch processing - $model = $this->createMock(Model::class); - $model->expects($this->once()) - ->method('supports') - ->with(Capability::INPUT_MULTIPLE) - ->willReturn(false); - $documents = [ new TextDocument(Uuid::v4(), 'Document 1'), new TextDocument(Uuid::v4(), 'Document 2'), ]; - // When batch is not supported, the platform should be invoked once per document - // We simulate this by providing separate vectors for each invocation $vectors = [ new Vector([0.1, 0.2]), new Vector([0.3, 0.4]), ]; - // Create a custom platform handler for non-batch mode - $handler = new class($vectors) implements ModelClientInterface, ResultConverterInterface { - private int $callIndex = 0; - - /** - * @param Vector[] $vectors - */ - public function __construct( - private readonly array $vectors, - ) { - } - - public function supports(Model $model): bool - { - return true; - } - - public function request(Model $model, array|string|object $payload, array $options = []): RawHttpResult - { - return new RawHttpResult(new MockResponse()); - } - - public function convert(RawResultInterface $result, array $options = []): ResultInterface - { - // Return one vector at a time for non-batch mode - return new VectorResult($this->vectors[$this->callIndex++]); - } + // Create a test model catalog that explicitly does NOT have INPUT_MULTIPLE capability + $modelCatalog = new class extends AbstractModelCatalog { + protected array $models = [ + 'test-embedding-no-batch' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + // Explicitly excluding INPUT_MULTIPLE capability + ], + ], + ]; }; - $platform = new Platform([$handler], [$handler]); + $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors), $modelCatalog); - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'test-embedding-no-batch'); $vectorDocuments = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount(2, $vectorDocuments); @@ -317,9 +285,7 @@ public function testVectorizeString() $vector = new Vector([0.1, 0.2, 
0.3]); $platform = PlatformTestHandler::createPlatform(new VectorResult($vector)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $result = $vectorizer->vectorize($text); $this->assertInstanceOf(Vector::class, $result); @@ -332,9 +298,7 @@ public function testVectorizeStringWithSpecialCharacters() $vector = new Vector([0.5, 0.6, 0.7]); $platform = PlatformTestHandler::createPlatform(new VectorResult($vector)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $result = $vectorizer->vectorize($text); $this->assertInstanceOf(Vector::class, $result); @@ -347,9 +311,7 @@ public function testVectorizeEmptyString() $vector = new Vector([0.0, 0.0, 0.0]); $platform = PlatformTestHandler::createPlatform(new VectorResult($vector)); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $result = $vectorizer->vectorize($text); $this->assertInstanceOf(Vector::class, $result); @@ -361,9 +323,7 @@ public function testVectorizeStringThrowsExceptionWhenNoVectorReturned() $text = 'Test string'; $platform = PlatformTestHandler::createPlatform(new VectorResult()); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $this->expectException(RuntimeException::class); $this->expectExceptionMessage('No vector returned for string vectorization.'); @@ -380,19 +340,10 @@ public function testVectorizeTextDocumentsPassesOptionsToInvoke() $vector = new Vector([0.1, 0.2, 0.3]); $options = ['max_tokens' => 1000, 'temperature' => 0.5]; - $platform = $this->createMock(PlatformInterface::class); - $platform->expects($this->once()) - ->method('invoke') - ->with( - $this->isInstanceOf(Model::class), - $this->equalTo('Test document'), - $this->equalTo($options) - ) - ->willReturn(new ResultPromise(fn () => new VectorResult($vector), $this->createMock(RawResultInterface::class))); - - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + // Use DynamicModelCatalog which provides all capabilities including INPUT_MULTIPLE + // This ensures batch mode is used and the test expectation matches the behavior + $platform = PlatformTestHandler::createPlatform(new VectorResult($vector)); + $vectorizer = new Vectorizer($platform, 'test-embedding-with-batch'); $result = $vectorizer->vectorizeTextDocuments($documents, $options); $this->assertCount(1, $result); @@ -407,19 +358,10 @@ public function testVectorizeTextDocumentsWithEmptyOptions() $vector = new Vector([0.1, 0.2, 0.3]); - $platform = $this->createMock(PlatformInterface::class); - $platform->expects($this->once()) - ->method('invoke') - ->with( - $this->isInstanceOf(Model::class), - $this->equalTo('Test document'), - $this->equalTo([]) - ) - ->willReturn(new ResultPromise(fn () => new VectorResult($vector), $this->createMock(RawResultInterface::class))); - - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + // Use DynamicModelCatalog which provides all capabilities including INPUT_MULTIPLE + // This ensures batch mode is used and the test expectation matches the behavior + $platform = 
PlatformTestHandler::createPlatform(new VectorResult($vector)); + $vectorizer = new Vectorizer($platform, 'test-embedding-with-batch'); $result = $vectorizer->vectorizeTextDocuments($documents); $this->assertCount(1, $result); @@ -436,15 +378,13 @@ public function testVectorizeStringPassesOptionsToInvoke() $platform->expects($this->once()) ->method('invoke') ->with( - $this->isInstanceOf(Model::class), + $this->equalTo('text-embedding-3-small'), $this->equalTo($text), $this->equalTo($options) ) ->willReturn(new ResultPromise(fn () => new VectorResult($vector), $this->createMock(RawResultInterface::class))); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $result = $vectorizer->vectorize($text, $options); $this->assertEquals($vector, $result); @@ -459,15 +399,13 @@ public function testVectorizeStringWithEmptyOptions() $platform->expects($this->once()) ->method('invoke') ->with( - $this->isInstanceOf(Model::class), + $this->equalTo('text-embedding-3-small'), $this->equalTo($text), $this->equalTo([]) ) ->willReturn(new ResultPromise(fn () => new VectorResult($vector), $this->createMock(RawResultInterface::class))); - $model = new Embeddings(Embeddings::TEXT_3_SMALL); - - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'text-embedding-3-small'); $result = $vectorizer->vectorize($text); $this->assertEquals($vector, $result); @@ -475,12 +413,6 @@ public function testVectorizeStringWithEmptyOptions() public function testVectorizeTextDocumentsWithoutBatchSupportPassesOptions() { - $model = $this->createMock(Model::class); - $model->expects($this->once()) - ->method('supports') - ->with(Capability::INPUT_MULTIPLE) - ->willReturn(false); - $documents = [ new TextDocument(Uuid::v4(), 'Document 1'), new TextDocument(Uuid::v4(), 'Document 2'), @@ -493,24 +425,22 @@ public function testVectorizeTextDocumentsWithoutBatchSupportPassesOptions() $options = ['max_tokens' => 2000]; - $platform = $this->createMock(PlatformInterface::class); - - $invokeCallCount = 0; - $platform->expects($this->exactly(2)) - ->method('invoke') - ->willReturnCallback(function ($passedModel, $passedContent, $passedOptions) use ($options, $vectors, &$invokeCallCount) { - $this->assertInstanceOf(Model::class, $passedModel); - $this->assertEquals($options, $passedOptions); - - $expectedContent = 0 === $invokeCallCount ? 
'Document 1' : 'Document 2'; - $this->assertEquals($expectedContent, $passedContent); - - $vector = $vectors[$invokeCallCount++]; + // Create a test model catalog without INPUT_MULTIPLE capability + $modelCatalog = new class extends AbstractModelCatalog { + protected array $models = [ + 'test-embedding-no-batch-with-options' => [ + 'class' => Model::class, + 'capabilities' => [ + Capability::INPUT_TEXT, + // No INPUT_MULTIPLE capability + ], + ], + ]; + }; - return new ResultPromise(fn () => new VectorResult($vector), $this->createMock(RawResultInterface::class)); - }); + $platform = PlatformTestHandler::createPlatform(new VectorResult(...$vectors), $modelCatalog); - $vectorizer = new Vectorizer($platform, $model); + $vectorizer = new Vectorizer($platform, 'test-embedding-no-batch-with-options'); $result = $vectorizer->vectorizeTextDocuments($documents, $options); $this->assertCount(2, $result); diff --git a/src/store/tests/Double/PlatformTestHandler.php b/src/store/tests/Double/PlatformTestHandler.php index 0a9406f35..349695d51 100644 --- a/src/store/tests/Double/PlatformTestHandler.php +++ b/src/store/tests/Double/PlatformTestHandler.php @@ -12,6 +12,8 @@ namespace Symfony\AI\Store\Tests\Double; use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\ModelCatalog\DynamicModelCatalog; +use Symfony\AI\Platform\ModelCatalog\ModelCatalogInterface; use Symfony\AI\Platform\ModelClientInterface; use Symfony\AI\Platform\Platform; use Symfony\AI\Platform\Result\RawHttpResult; @@ -31,11 +33,11 @@ public function __construct( ) { } - public static function createPlatform(?ResultInterface $create = null): Platform + public static function createPlatform(?ResultInterface $create = null, ModelCatalogInterface $modelCatalog = new DynamicModelCatalog()): Platform { $handler = new self($create); - return new Platform([$handler], [$handler]); + return new Platform([$handler], [$handler], $modelCatalog); } public function supports(Model $model): bool diff --git a/src/store/tests/IndexerTest.php b/src/store/tests/IndexerTest.php index 5a5f19617..0f71668fc 100644 --- a/src/store/tests/IndexerTest.php +++ b/src/store/tests/IndexerTest.php @@ -12,7 +12,6 @@ namespace Symfony\AI\Store\Tests; use PHPUnit\Framework\TestCase; -use Symfony\AI\Platform\Bridge\OpenAi\Embeddings; use Symfony\AI\Platform\Result\VectorResult; use Symfony\AI\Platform\Vector\Vector; use Symfony\AI\Store\Document\Filter\TextContainsFilter; @@ -35,7 +34,7 @@ public function testIndexSingleDocument() $document = new TextDocument($id = Uuid::v4(), 'Test content'); $vector = new Vector([0.1, 0.2, 0.3]); $loader = new InMemoryLoader([$document]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), 'text-embedding-3-small'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore()); $indexer->index(); @@ -49,7 +48,7 @@ public function testIndexSingleDocument() public function testIndexEmptyDocumentList() { $loader = new InMemoryLoader([]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(), 'text-embedding-3-small'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore()); $indexer->index(); @@ -63,7 +62,7 @@ public function testIndexDocumentWithMetadata() $document = new TextDocument($id = Uuid::v4(), 'Test 
content', $metadata); $vector = new Vector([0.1, 0.2, 0.3]); $loader = new InMemoryLoader([$document]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), 'text-embedding-3-small'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore()); $indexer->index(); @@ -83,7 +82,7 @@ public function testWithSource() // InMemoryLoader doesn't use source parameter, so we'll test withSource method's immutability $loader = new InMemoryLoader([$document1]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), 'text-embedding-3-small'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore(), 'source1'); @@ -105,11 +104,17 @@ public function testWithSourceArray() { $document1 = new TextDocument(Uuid::v4(), 'Document 1'); $document2 = new TextDocument(Uuid::v4(), 'Document 2'); - $vector = new Vector([0.1, 0.2, 0.3]); + $vector1 = new Vector([0.1, 0.2, 0.3]); + $vector2 = new Vector([0.4, 0.5, 0.6]); + $vector3 = new Vector([0.7, 0.8, 0.9]); + $vector4 = new Vector([1.0, 1.1, 1.2]); + $vector5 = new Vector([1.3, 1.4, 1.5]); + $vector6 = new Vector([1.6, 1.7, 1.8]); // InMemoryLoader returns all documents regardless of source $loader = new InMemoryLoader([$document1, $document2]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + // Need 6 vectors total: 2 for first indexer, then 2 for each source in the second indexer (2 sources * 2 docs = 4) + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector1, $vector2, $vector3, $vector4, $vector5, $vector6)), 'test-embedding-model'); // Create indexer with single source $indexer = new Indexer($loader, $vectorizer, $store1 = new TestStore(), 'source1'); @@ -138,9 +143,11 @@ public function testIndexWithTextContainsFilter() new TextDocument(Uuid::v4(), 'Week of Symfony news roundup'), new TextDocument(Uuid::v4(), 'Another regular post'), ]; - $vector = new Vector([0.1, 0.2, 0.3]); + // Filter will remove the "Week of Symfony" document, leaving 2 documents + $vector1 = new Vector([0.1, 0.2, 0.3]); + $vector2 = new Vector([0.4, 0.5, 0.6]); $loader = new InMemoryLoader($documents); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector1, $vector2)), 'test-embedding-model'); $filter = new TextContainsFilter('Week of Symfony'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore(), null, [$filter]); @@ -158,9 +165,11 @@ public function testIndexWithMultipleFilters() new TextDocument(Uuid::v4(), 'SPAM content here'), new TextDocument(Uuid::v4(), 'Good content'), ]; - $vector = new Vector([0.1, 0.2, 0.3]); + // Filters will remove "Week of Symfony" and "SPAM" documents, leaving 2 documents + $vector1 = new Vector([0.1, 0.2, 0.3]); + $vector2 = new Vector([0.4, 0.5, 0.6]); $loader = new InMemoryLoader($documents); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new 
Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector1, $vector2)), 'test-embedding-model'); $filters = [ new TextContainsFilter('Week of Symfony'), new TextContainsFilter('SPAM'), @@ -180,9 +189,11 @@ public function testIndexWithFiltersAndTransformers() new TextDocument(Uuid::v4(), 'Week of Symfony news'), new TextDocument(Uuid::v4(), 'Good content'), ]; - $vector = new Vector([0.1, 0.2, 0.3]); + // Filter will remove "Week of Symfony" document, leaving 2 documents + $vector1 = new Vector([0.1, 0.2, 0.3]); + $vector2 = new Vector([0.4, 0.5, 0.6]); $loader = new InMemoryLoader($documents); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector1, $vector2)), 'test-embedding-model'); $filter = new TextContainsFilter('Week of Symfony'); $transformer = new class implements TransformerInterface { public function transform(iterable $documents, array $options = []): iterable @@ -214,9 +225,11 @@ public function testIndexWithFiltersAndTransformersAppliesBoth() new TextDocument(Uuid::v4(), 'Remove this content'), // Will be filtered out new TextDocument(Uuid::v4(), 'Also keep this one'), ]; - $vector = new Vector([0.1, 0.2, 0.3]); + // Filter will remove the "Remove" document, leaving 2 documents + $vector1 = new Vector([0.1, 0.2, 0.3]); + $vector2 = new Vector([0.4, 0.5, 0.6]); $loader = new InMemoryLoader($documents); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector1, $vector2)), 'test-embedding-model'); $filter = new class implements FilterInterface { public function filter(iterable $documents, array $options = []): iterable @@ -257,7 +270,7 @@ public function testIndexWithNoFilters() $document = new TextDocument(Uuid::v4(), 'Test content'); $vector = new Vector([0.1, 0.2, 0.3]); $loader = new InMemoryLoader([$document]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), 'text-embedding-3-small'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore(), null, []); $indexer->index(); @@ -270,7 +283,7 @@ public function testWithSourcePreservesFilters() $document = new TextDocument(Uuid::v4(), 'Test content'); $vector = new Vector([0.1, 0.2, 0.3]); $loader = new InMemoryLoader([$document]); - $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), new Embeddings(Embeddings::TEXT_3_SMALL)); + $vectorizer = new Vectorizer(PlatformTestHandler::createPlatform(new VectorResult($vector)), 'text-embedding-3-small'); $filter = new TextContainsFilter('nonexistent'); $indexer = new Indexer($loader, $vectorizer, $store = new TestStore(), 'source1', [$filter]);
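The Vectorizer and Indexer test changes above all follow the same pattern: the vectorizer now receives a plain model-name string and defers capability questions to the platform's model catalog. Below is a minimal sketch of that lookup, assuming only the AbstractModelCatalog API introduced in this PR (a protected $models map plus getModel()); the anonymous catalog and its single entry are illustrative stand-ins, not part of the patch itself.

<?php

use Symfony\AI\Platform\Capability;
use Symfony\AI\Platform\Model;
use Symfony\AI\Platform\ModelCatalog\AbstractModelCatalog;

// Hypothetical catalog with a single embeddings entry; real bridges ship their own
// ModelCatalog classes with the full model list.
$catalog = new class extends AbstractModelCatalog {
    protected array $models = [
        'text-embedding-3-small' => [
            'class' => Model::class,
            'capabilities' => [Capability::INPUT_TEXT, Capability::INPUT_MULTIPLE],
        ],
    ];
};

// Resolve the model by name, as Vectorizer does via $platform->getModelCatalog().
$model = $catalog->getModel('text-embedding-3-small');

// This capability check is what decides between one batched platform call and one
// call per document in Vectorizer::vectorizeTextDocuments().
$usesBatch = $model->supports(Capability::INPUT_MULTIPLE); // true for this entry

In the tests, the same effect is achieved by passing a custom AbstractModelCatalog (or relying on DynamicModelCatalog, which grants every capability) to PlatformTestHandler::createPlatform().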