Closed
Description
Model "ollama-llama3.1:8b" is not selected even though it is listed under "default_services" in the config file. The following code was executed with the latest version of VS Code.
sample code
import semantic_kernel as sk
kernel = sk.Kernel()
from semantic_kernel.connectors.ai.ollama import OllamaChatCompletion
kernel.add_service(
OllamaChatCompletion(
service_id="ollama-qwen3:30b",
host="http://localhost:11434",
ai_model_id="qwen3:30b",
)
)
kernel.add_service(
OllamaChatCompletion(
service_id="ollama-llama3.1:8b",
host="http://localhost:11434",
ai_model_id="llama3.1:8b",
)
)
jokes_plugin = kernel.add_plugin(None, parent_directory="plugins", plugin_name="jokes")
theme = "Dishes"
from semantic_kernel.functions.kernel_arguments import KernelArguments
knock_joke = await kernel.invoke(jokes_plugin["knock_knock_joke"], KernelArguments(input=theme))
print(knock_joke)
config.json
{
"schema": 1,
"type": "completion",
"description": "Generates a knock-knock joke based on user input",
"default_services": [
"ollama-llama3.1:8b"
],
"execution_settings": {
"default": {
"temperature": 0.8,
"number_of_responses": 1,
"top_p": 1,
"max_tokens": 4000,
"presence_penalty": 0.0,
"frequency_penalty": 0.0
}
},
"input_variables": [
{
"name": "input",
"description": "The topic that the joke should be written about",
"required": true
}
]
}
Metadata
Assignees
Type
Projects
Status
Bug