-
Notifications
You must be signed in to change notification settings - Fork 3k
/
kernel.py
820 lines (712 loc) · 36.3 KB
/
kernel.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
# Copyright (c) Microsoft. All rights reserved.
from __future__ import annotations
import logging
from copy import copy
from functools import singledispatchmethod
from typing import TYPE_CHECKING, Any, AsyncGenerator, AsyncIterable, Literal, Type, TypeVar, Union
from pydantic import Field, field_validator
from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
from semantic_kernel.const import METADATA_EXCEPTION_KEY
from semantic_kernel.contents.streaming_content_mixin import StreamingContentMixin
from semantic_kernel.exceptions import (
KernelFunctionAlreadyExistsError,
KernelFunctionNotFoundError,
KernelInvokeException,
KernelPluginNotFoundError,
KernelServiceNotFoundError,
OperationCancelledException,
ServiceInvalidTypeError,
TemplateSyntaxError,
)
from semantic_kernel.functions.function_result import FunctionResult
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt
from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata
from semantic_kernel.functions.kernel_plugin import KernelPlugin
from semantic_kernel.kernel_extensions.kernel_filters_extension import KernelFilterExtension
from semantic_kernel.prompt_template.const import KERNEL_TEMPLATE_FORMAT_NAME, TEMPLATE_FORMAT_TYPES
from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase
from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig
from semantic_kernel.reliability.pass_through_without_retry import PassThroughWithoutRetry
from semantic_kernel.reliability.retry_mechanism_base import RetryMechanismBase
from semantic_kernel.services.ai_service_client_base import AIServiceClientBase
from semantic_kernel.services.ai_service_selector import AIServiceSelector
if TYPE_CHECKING:
from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase
from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase
from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase
from semantic_kernel.connectors.openai_plugin.openai_function_execution_parameters import (
OpenAIFunctionExecutionParameters,
)
from semantic_kernel.connectors.openapi_plugin.openapi_function_execution_parameters import (
OpenAPIFunctionExecutionParameters,
)
from semantic_kernel.functions.kernel_function import KernelFunction
from semantic_kernel.functions.types import KERNEL_FUNCTION_TYPE
# Generic type parameter used by typed helpers in this module.
T = TypeVar("T")
# Constrains service-handling helpers to AI service client implementations.
AI_SERVICE_CLIENT_TYPE = TypeVar("AI_SERVICE_CLIENT_TYPE", bound=AIServiceClientBase)
# Union of every AI service interface the kernel can select between.
ALL_SERVICE_TYPES = Union["TextCompletionClientBase", "ChatCompletionClientBase", "EmbeddingGeneratorBase"]
# Module-level logger for kernel invocation diagnostics.
logger: logging.Logger = logging.getLogger(__name__)
class Kernel(KernelFilterExtension):
    """The main entry point for the Semantic Kernel.

    Provides the ability to run semantic/native functions, and to manage
    plugins, memory, and AI services.

    Attributes:
        plugins (dict[str, KernelPlugin]): The plugins registered on the kernel, keyed by plugin name.
        services (dict[str, AIServiceClientBase]): The AI services registered on the kernel, keyed by service_id.
        ai_service_selector (AIServiceSelector): Strategy used to pick a (service, settings) pair for a function.
        retry_mechanism (RetryMechanismBase): Retry policy; defaults to pass-through (no retries).
    """

    # region Init
    # Inputs are normalized into these dicts by the "before"-mode field validators below.
    plugins: dict[str, KernelPlugin] = Field(default_factory=dict)
    services: dict[str, AIServiceClientBase] = Field(default_factory=dict)
    ai_service_selector: AIServiceSelector = Field(default_factory=AIServiceSelector)
    retry_mechanism: RetryMechanismBase = Field(default_factory=PassThroughWithoutRetry)
def __init__(
    self,
    plugins: KernelPlugin | dict[str, KernelPlugin] | list[KernelPlugin] | None = None,
    services: (
        AI_SERVICE_CLIENT_TYPE | list[AI_SERVICE_CLIENT_TYPE] | dict[str, AI_SERVICE_CLIENT_TYPE] | None
    ) = None,
    ai_service_selector: AIServiceSelector | None = None,
    **kwargs: Any,
) -> None:
    """Initialize a new Kernel instance.

    Args:
        plugins: The plugins for the kernel; normalized by a field validator
            into a dict keyed by plugin name.
        services: The AI services for the kernel; normalized by a field
            validator into a dict keyed by service_id.
        ai_service_selector: The AI service selector; when omitted, the
            default selector (based on order of execution settings) is used.
        **kwargs: Additional model fields, limited to retry_mechanism and the
            function_invoking/invoked handlers; prefer the dedicated
            add_function_invoking_handler / add_function_invoked_handler methods.
    """
    model_fields: dict[str, Any] = {"services": services, "plugins": plugins, **kwargs}
    # Only pass the selector through when explicitly provided, so the
    # pydantic default_factory supplies one otherwise.
    if ai_service_selector:
        model_fields["ai_service_selector"] = ai_service_selector
    super().__init__(**model_fields)
@field_validator("plugins", mode="before")
@classmethod
def rewrite_plugins(
    cls, plugins: KernelPlugin | list[KernelPlugin] | dict[str, KernelPlugin] | None = None
) -> dict[str, KernelPlugin]:
    """Normalize the plugins input into a dict keyed by plugin name."""
    if not plugins:
        # None or an empty container -> empty registry.
        return {}
    if isinstance(plugins, KernelPlugin):
        # A single plugin becomes a one-entry dict.
        return {plugins.name: plugins}
    if isinstance(plugins, list):
        return {plugin.name: plugin for plugin in plugins}
    # Already a mapping: pass through untouched.
    return plugins
@field_validator("services", mode="before")
@classmethod
def rewrite_services(
    cls,
    services: (
        AI_SERVICE_CLIENT_TYPE | list[AI_SERVICE_CLIENT_TYPE] | dict[str, AI_SERVICE_CLIENT_TYPE] | None
    ) = None,
) -> dict[str, AI_SERVICE_CLIENT_TYPE]:
    """Normalize the services input into a dict keyed by service_id."""
    if not services:
        # None or an empty container -> empty registry.
        return {}
    if isinstance(services, AIServiceClientBase):
        # A single service, keyed by its id, falling back to "default".
        return {services.service_id or "default": services}  # type: ignore
    if isinstance(services, list):
        return {service.service_id or "default": service for service in services}
    # Already a mapping: pass through untouched.
    return services
# endregion
# region Invoke Functions
async def invoke_stream(
    self,
    function: "KernelFunction" | None = None,
    arguments: KernelArguments | None = None,
    function_name: str | None = None,
    plugin_name: str | None = None,
    metadata: dict[str, Any] | None = None,
    return_function_results: bool = False,
    **kwargs: Any,
) -> AsyncGenerator[list["StreamingContentMixin"] | FunctionResult | list[FunctionResult], Any]:
    """Execute a function and stream its results.

    Args:
        function (KernelFunction | None): The function to execute; takes precedence over
            function_name/plugin_name. When None, both function_name and plugin_name
            are required and used to look the function up.
        arguments (KernelArguments | None): The arguments to pass to the function, optional.
        function_name (str | None): The name of the function to execute.
        plugin_name (str | None): The name of the plugin containing the function.
        metadata (dict[str, Any] | None): Metadata for the invocation; defaults to an
            empty dict. NOTE(review): currently accepted but not forwarded to the
            function — confirm whether this is intentional.
        return_function_results (bool): If True, a FunctionResult aggregating the
            streamed content is yielded after the stream, in addition to the content.
        **kwargs (Any): Arguments that can be used instead of supplying KernelArguments.

    Yields:
        The streaming content of the function, and optionally a final FunctionResult.

    Raises:
        KernelFunctionNotFoundError: If neither a function nor both names are provided.
        KernelInvokeException: If the function reports an exception in its result metadata.
    """
    # Fix: use None as default to avoid the shared-mutable-default-argument pitfall.
    if metadata is None:
        metadata = {}
    if arguments is None:
        arguments = KernelArguments(**kwargs)
    if not function:
        if not function_name or not plugin_name:
            raise KernelFunctionNotFoundError("No function(s) or function- and plugin-name provided")
        function = self.get_function(plugin_name, function_name)
    function_result: list[list["StreamingContentMixin"] | Any] = []
    async for stream_message in function.invoke_stream(self, arguments):
        # A FunctionResult carrying an exception in its metadata signals a failure mid-stream.
        if isinstance(stream_message, FunctionResult) and (
            exception := stream_message.metadata.get(METADATA_EXCEPTION_KEY, None)
        ):
            raise KernelInvokeException(
                f"Error occurred while invoking function: '{function.fully_qualified_name}'"
            ) from exception
        function_result.append(stream_message)
        yield stream_message
    if return_function_results:
        # Merge the streamed chunks per choice_index into complete contents.
        output_function_result: list["StreamingContentMixin"] = []
        for result in function_result:
            for choice in result:
                if not isinstance(choice, StreamingContentMixin):
                    continue
                if len(output_function_result) <= choice.choice_index:
                    output_function_result.append(copy(choice))
                else:
                    output_function_result[choice.choice_index] += choice
        yield FunctionResult(function=function.metadata, value=output_function_result)
async def invoke(
    self,
    function: "KernelFunction" | None = None,
    arguments: KernelArguments | None = None,
    function_name: str | None = None,
    plugin_name: str | None = None,
    metadata: dict[str, Any] | None = None,
    **kwargs: Any,
) -> FunctionResult | None:
    """Execute a function and return its result.

    Args:
        function (KernelFunction | None): The function to execute; takes precedence over
            function_name/plugin_name. When None, both function_name and plugin_name
            are required and used to look the function up.
        arguments (KernelArguments | None): The arguments to pass to the function, optional.
            When supplied, extra **kwargs are merged into it.
        function_name (str | None): The name of the function to execute.
        plugin_name (str | None): The name of the plugin containing the function.
        metadata (dict[str, Any] | None): Metadata passed to the function; defaults to
            an empty dict.
        **kwargs (Any): Arguments that can be used instead of supplying KernelArguments.

    Returns:
        FunctionResult | None: The result, or None when the invocation was cancelled.

    Raises:
        KernelFunctionNotFoundError: If neither a function nor both names are provided.
        KernelInvokeException: If the function invocation fails.
    """
    # Fix: use None as default to avoid the shared-mutable-default-argument pitfall.
    if metadata is None:
        metadata = {}
    if arguments is None:
        arguments = KernelArguments(**kwargs)
    else:
        arguments.update(kwargs)
    if not function:
        if not function_name or not plugin_name:
            raise KernelFunctionNotFoundError("No function, or function name and plugin name provided")
        function = self.get_function(plugin_name, function_name)
    try:
        return await function.invoke(kernel=self, arguments=arguments, metadata=metadata)
    except OperationCancelledException as exc:
        # A user cancellation is not an error: log it and return nothing.
        logger.info(f"Operation cancelled during function invocation. Message: {exc}")
        return None
    except Exception as exc:
        logger.error(
            "Something went wrong in function invocation. During function invocation:"
            f" '{function.fully_qualified_name}'. Error description: '{str(exc)}'"
        )
        raise KernelInvokeException(
            f"Error occurred while invoking function: '{function.fully_qualified_name}'"
        ) from exc
async def invoke_prompt(
    self,
    function_name: str,
    plugin_name: str,
    prompt: str,
    arguments: KernelArguments | None = None,
    template_format: Literal[
        "semantic-kernel",
        "handlebars",
        "jinja2",
    ] = KERNEL_TEMPLATE_FORMAT_NAME,
    **kwargs: Any,
) -> FunctionResult | None:
    """Create a one-off prompt function and invoke it.

    Args:
        function_name (str): The name to give the ad-hoc function.
        plugin_name (str): The plugin name to register the function under.
        prompt (str): The prompt template text.
        arguments (KernelArguments | None): Arguments for the invocation, optional.
        template_format (str): The format of the prompt template.
        **kwargs (Any): Arguments usable instead of supplying KernelArguments.

    Returns:
        FunctionResult | None: The result of the invocation.

    Raises:
        TemplateSyntaxError: If the prompt is null or empty.
    """
    if not arguments:
        arguments = KernelArguments(**kwargs)
    if not prompt:
        raise TemplateSyntaxError("The prompt is either null or empty.")
    # Wrap the raw prompt in a throwaway prompt function and delegate to invoke().
    prompt_function = KernelFunctionFromPrompt(
        function_name=function_name,
        plugin_name=plugin_name,
        prompt=prompt,
        template_format=template_format,
    )
    return await self.invoke(function=prompt_function, arguments=arguments)
async def invoke_prompt_stream(
    self,
    function_name: str,
    plugin_name: str,
    prompt: str,
    arguments: KernelArguments | None = None,
    template_format: Literal[
        "semantic-kernel",
        "handlebars",
        "jinja2",
    ] = KERNEL_TEMPLATE_FORMAT_NAME,
    return_function_results: bool | None = False,
    **kwargs: Any,
) -> AsyncIterable[list["StreamingContentMixin"] | FunctionResult | list[FunctionResult]]:
    """Create a one-off prompt function, invoke it, and stream the results.

    Args:
        function_name (str): The name to give the ad-hoc function.
        plugin_name (str): The plugin name to register the function under.
        prompt (str): The prompt template text.
        arguments (KernelArguments | None): Arguments for the invocation, optional.
        template_format (str): The format of the prompt template.
        return_function_results (bool | None): If True, a FunctionResult aggregating
            the streamed content is yielded after the stream completes.
        **kwargs (Any): Arguments usable instead of supplying KernelArguments.

    Yields:
        The streaming content of the function, and optionally a final FunctionResult.

    Raises:
        TemplateSyntaxError: If the prompt is null or empty.
        KernelInvokeException: If the function reports an exception in its result metadata.
    """
    if not arguments:
        arguments = KernelArguments(**kwargs)
    if not prompt:
        raise TemplateSyntaxError("The prompt is either null or empty.")
    # KernelFunctionFromPrompt is imported at module level; no local re-import needed.
    function = KernelFunctionFromPrompt(
        function_name=function_name,
        plugin_name=plugin_name,
        prompt=prompt,
        template_format=template_format,
    )
    function_result: list[list["StreamingContentMixin"] | Any] = []
    async for stream_message in self.invoke_stream(function=function, arguments=arguments):
        # A FunctionResult carrying an exception in its metadata signals a failure mid-stream.
        if isinstance(stream_message, FunctionResult) and (
            exception := stream_message.metadata.get(METADATA_EXCEPTION_KEY, None)
        ):
            raise KernelInvokeException(
                f"Error occurred while invoking function: '{function.fully_qualified_name}'"
            ) from exception
        function_result.append(stream_message)
        yield stream_message
    if return_function_results:
        # Merge the streamed chunks per choice_index into complete contents.
        output_function_result: list["StreamingContentMixin"] = []
        for result in function_result:
            for choice in result:
                if not isinstance(choice, StreamingContentMixin):
                    continue
                if len(output_function_result) <= choice.choice_index:
                    output_function_result.append(copy(choice))
                else:
                    output_function_result[choice.choice_index] += choice
        yield FunctionResult(function=function.metadata, value=output_function_result)
# endregion
# region Plugins & Functions
def add_plugin(
    self,
    plugin: KernelPlugin | object | dict[str, Any] | None = None,
    plugin_name: str | None = None,
    parent_directory: str | None = None,
    description: str | None = None,
    class_init_arguments: dict[str, dict[str, Any]] | None = None,
) -> "KernelPlugin":
    """Add a plugin to the kernel's plugin collection.

    A KernelPlugin instance is stored as-is; any other object is wrapped via
    KernelPlugin.from_object; when no plugin is given, KernelPlugin.from_directory
    loads it from parent_directory (see that method for how the directory is parsed).

    Args:
        plugin (KernelPlugin | object | dict[str, Any] | None): The plugin to add:
            a KernelPlugin (stored directly, other parameters ignored), a custom
            object or dict of kernel_function-decorated callables, or None to load
            the plugin from parent_directory.
        plugin_name (str | None): Name for the plugin; required unless a
            KernelPlugin instance is supplied.
        parent_directory (str | None): Directory containing the plugin directory,
            used when plugin is None.
        description (str | None): Description used when the plugin is not a KernelPlugin.
        class_init_arguments (dict[str, dict[str, Any]] | None): Class initialization
            arguments for directory-based loading.

    Returns:
        KernelPlugin: The plugin that was added.

    Raises:
        ValueError: If required parameters are missing.
        ValidationError: If a KernelPlugin must be created but is not valid.
    """
    if isinstance(plugin, KernelPlugin):
        # Already a KernelPlugin: register it under its own name.
        self.plugins[plugin.name] = plugin
        return self.plugins[plugin.name]
    if not plugin_name:
        raise ValueError("plugin_name must be provided if a plugin is not supplied.")
    if plugin:
        # Wrap an arbitrary object (or dict of functions) in a KernelPlugin.
        created = KernelPlugin.from_object(
            plugin_name=plugin_name, plugin_instance=plugin, description=description
        )
    elif plugin is None and parent_directory is not None:
        # Load the plugin from disk.
        created = KernelPlugin.from_directory(
            plugin_name=plugin_name,
            parent_directory=parent_directory,
            description=description,
            class_init_arguments=class_init_arguments,
        )
    else:
        raise ValueError("plugin or parent_directory must be provided.")
    self.plugins[plugin_name] = created
    return created
def add_plugins(self, plugins: list[KernelPlugin] | dict[str, KernelPlugin | object]) -> None:
    """Add several plugins to the kernel's plugin collection.

    Args:
        plugins (list[KernelPlugin] | dict[str, KernelPlugin | object]): The plugins
            to add; a dict maps explicit plugin names to plugins/objects.
    """
    if isinstance(plugins, list):
        for candidate in plugins:
            self.add_plugin(candidate)
    else:
        for name, candidate in plugins.items():
            self.add_plugin(candidate, plugin_name=name)
def add_function(
    self,
    plugin_name: str,
    function: KERNEL_FUNCTION_TYPE | None = None,
    function_name: str | None = None,
    description: str | None = None,
    prompt: str | None = None,
    prompt_template_config: PromptTemplateConfig | None = None,
    prompt_execution_settings: (
        PromptExecutionSettings | list[PromptExecutionSettings] | dict[str, PromptExecutionSettings] | None
    ) = None,
    template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME,
    prompt_template: PromptTemplateBase | None = None,
    return_plugin: bool = False,
    **kwargs: Any,
) -> "KernelFunction | KernelPlugin":
    """Adds a function to the specified plugin, creating the plugin if needed.

    Args:
        plugin_name (str): The name of the plugin to add the function to.
        function (KernelFunction | Callable[..., Any] | None): The function to add;
            when None, a prompt function is built from the prompt parameters.
        function_name (str | None): The name of the function, required when
            building from a prompt.
        description (str | None): The description of the function.
        prompt (str | None): The prompt template text.
        prompt_template_config (PromptTemplateConfig | None): The prompt template configuration.
        prompt_execution_settings (PromptExecutionSettings | list[PromptExecutionSettings]
            | dict[str, PromptExecutionSettings] | None):
            The execution settings, will be parsed into a dict.
        template_format (str): The format of the prompt template.
        prompt_template (PromptTemplateBase | None): The prompt template.
        return_plugin (bool): If True, the plugin is returned instead of the function.
        kwargs (Any): Additional arguments, used as extension data for default
            execution settings when none are supplied.

    Returns:
        KernelFunction | KernelPlugin: The function that was added, or the plugin
            if return_plugin is True.

    Raises:
        ValueError: If neither a function nor enough prompt parameters are provided.
    """
    # Imported locally: at module level KernelFunction is only available under TYPE_CHECKING.
    from semantic_kernel.functions.kernel_function import KernelFunction

    if function is None:
        # Without a callable we must be able to build a prompt function.
        if not function_name or (not prompt and not prompt_template_config and not prompt_template):
            raise ValueError(
                "function_name and prompt, prompt_template_config or prompt_template must be provided if a function is not supplied."  # noqa: E501
            )
        if prompt_execution_settings is None and (
            prompt_template_config is None or prompt_template_config.execution_settings is None
        ):
            # No settings anywhere: build defaults from the remaining keyword arguments.
            prompt_execution_settings = PromptExecutionSettings(extension_data=kwargs)
        function = KernelFunction.from_prompt(
            function_name=function_name,
            plugin_name=plugin_name,
            description=description,
            prompt=prompt,
            template_format=template_format,
            prompt_template=prompt_template,
            prompt_template_config=prompt_template_config,
            prompt_execution_settings=prompt_execution_settings,
        )
    elif not isinstance(function, KernelFunction):
        # A plain callable: wrap it as a native kernel function.
        function = KernelFunction.from_method(plugin_name=plugin_name, method=function)
    if plugin_name not in self.plugins:
        # First function for this plugin: create the plugin around it.
        plugin = KernelPlugin(name=plugin_name, functions=function)
        self.add_plugin(plugin)
        return plugin if return_plugin else plugin[function.name]
    self.plugins[plugin_name][function.name] = function
    return self.plugins[plugin_name] if return_plugin else self.plugins[plugin_name][function.name]
def add_functions(
    self,
    plugin_name: str,
    functions: list[KERNEL_FUNCTION_TYPE] | dict[str, KERNEL_FUNCTION_TYPE],
) -> "KernelPlugin":
    """Add several functions to a plugin, creating the plugin when needed.

    Args:
        plugin_name (str): The name of the plugin to add the functions to.
        functions (list[KernelFunction] | dict[str, KernelFunction]): The functions to add.

    Returns:
        KernelPlugin: The plugin that the functions were added to.
    """
    existing = self.plugins.get(plugin_name)
    if existing is not None:
        # Plugin already registered: merge the new functions into it.
        existing.update(functions)
        return existing
    return self.add_plugin(KernelPlugin(name=plugin_name, functions=functions))  # type: ignore
def add_plugin_from_openapi(
    self,
    plugin_name: str,
    openapi_document_path: str,
    execution_settings: "OpenAPIFunctionExecutionParameters | None" = None,
    description: str | None = None,
) -> KernelPlugin:
    """Add a plugin created from an OpenAPI document.

    Args:
        plugin_name (str): The name of the plugin.
        openapi_document_path (str): The path to the OpenAPI document.
        execution_settings (OpenAPIFunctionExecutionParameters | None): The execution parameters.
        description (str | None): The description of the plugin.

    Returns:
        KernelPlugin: The imported plugin.

    Raises:
        PluginInitializationError: If the OpenAPI document is missing or invalid
            (raised by KernelPlugin.from_openapi).
    """
    return self.add_plugin(
        KernelPlugin.from_openapi(
            plugin_name=plugin_name,
            openapi_document_path=openapi_document_path,
            execution_settings=execution_settings,
            description=description,
        )
    )
async def add_plugin_from_openai(
    self,
    plugin_name: str,
    plugin_url: str | None = None,
    plugin_str: str | None = None,
    execution_parameters: "OpenAIFunctionExecutionParameters | None" = None,
    description: str | None = None,
) -> KernelPlugin:
    """Add a plugin from an OpenAI plugin manifest.

    Args:
        plugin_name (str): The name of the plugin.
        plugin_url (str | None): The URL of the plugin manifest.
        plugin_str (str | None): The JSON string of the plugin manifest.
        execution_parameters (OpenAIFunctionExecutionParameters | None): The execution parameters.
        description (str | None): The description of the plugin.

    Returns:
        KernelPlugin: The imported plugin.

    Raises:
        PluginInitializationError: If neither the plugin URL nor the plugin
            JSON/YAML is provided (raised by KernelPlugin.from_openai).
    """
    return self.add_plugin(
        await KernelPlugin.from_openai(
            plugin_name=plugin_name,
            plugin_url=plugin_url,
            plugin_str=plugin_str,
            execution_parameters=execution_parameters,
            description=description,
        )
    )
def get_plugin(self, plugin_name: str) -> "KernelPlugin":
    """Return the plugin registered under plugin_name.

    Args:
        plugin_name (str): The name of the plugin.

    Returns:
        KernelPlugin: The plugin.

    Raises:
        KernelPluginNotFoundError: If no plugin with that name is registered.
    """
    try:
        return self.plugins[plugin_name]
    except KeyError:
        raise KernelPluginNotFoundError(f"Plugin '{plugin_name}' not found") from None
def get_function(self, plugin_name: str | None, function_name: str) -> "KernelFunction":
    """Look up a function by plugin name and function name.

    Args:
        plugin_name (str | None): The plugin to search; when None, all plugins are searched.
        function_name (str): The name of the function.

    Returns:
        KernelFunction: The function.

    Raises:
        KernelPluginNotFoundError: If the named plugin is not registered.
        KernelFunctionNotFoundError: If the function cannot be found.
    """
    if plugin_name is None:
        # No plugin given: return the match from the first plugin that has it.
        for candidate in self.plugins.values():
            if function_name in candidate:
                return candidate[function_name]
        raise KernelFunctionNotFoundError(f"Function '{function_name}' not found in any plugin.")
    if plugin_name not in self.plugins:
        raise KernelPluginNotFoundError(f"Plugin '{plugin_name}' not found")
    plugin = self.plugins[plugin_name]
    if function_name not in plugin:
        raise KernelFunctionNotFoundError(f"Function '{function_name}' not found in plugin '{plugin_name}'")
    return plugin[function_name]
def get_function_from_fully_qualified_function_name(self, fully_qualified_function_name: str) -> "KernelFunction":
    """Get a function by its fully qualified name (<plugin_name>-<function_name>).

    Args:
        fully_qualified_function_name (str): The fully qualified name of the function;
            when it contains no '-', it is treated as a bare function_name and all
            plugins are searched.

    Returns:
        KernelFunction: The function.

    Raises:
        KernelPluginNotFoundError: If the plugin is not found.
        KernelFunctionNotFoundError: If the function is not found.
    """
    # Split on the first '-' only: function names may themselves contain dashes.
    plugin_name, separator, function_name = fully_qualified_function_name.partition("-")
    if not separator:
        # No '-' present: the whole string is the function name.
        plugin_name, function_name = None, fully_qualified_function_name
    return self.get_function(plugin_name, function_name)
def get_full_list_of_function_metadata(self) -> list["KernelFunctionMetadata"]:
    """Return the metadata of every function in every registered plugin."""
    metadata: list["KernelFunctionMetadata"] = []
    for plugin in self.plugins.values():
        metadata.extend(func.metadata for func in plugin)
    return metadata
@singledispatchmethod
def get_list_of_function_metadata(self, *args: Any, **kwargs: Any) -> list["KernelFunctionMetadata"]:
    """Get a list of function metadata; dispatches on the type of the first argument.

    Registered overloads handle bool (filter by prompt/native kind) and dict
    (include/exclude filters); any other argument type is unsupported.
    """
    raise NotImplementedError("This method is not implemented for the provided arguments.")
@get_list_of_function_metadata.register(bool)
def get_list_of_function_metadata_bool(
    self, include_prompt: bool = True, include_native: bool = True
) -> list["KernelFunctionMetadata"]:
    """Get function metadata from the plugin collection, filtered by function kind.

    Args:
        include_prompt (bool): Whether to include prompt (semantic) functions.
        include_native (bool): Whether to include native functions.

    Returns:
        list[KernelFunctionMetadata]: The matching metadata objects.
    """
    if not self.plugins:
        return []
    selected: list["KernelFunctionMetadata"] = []
    for plugin in self.plugins.values():
        for func in plugin.functions.values():
            # Keep the function when its kind (prompt vs native) is requested.
            wanted = include_prompt if func.is_prompt else include_native
            if wanted:
                selected.append(func.metadata)
    return selected
@get_list_of_function_metadata.register(dict)
def get_list_of_function_metadata_filters(
    self,
    filters: dict[
        Literal["excluded_plugins", "included_plugins", "excluded_functions", "included_functions"], list[str]
    ],
) -> list["KernelFunctionMetadata"]:
    """Get function metadata filtered by plugin/function include-exclude lists.

    Args:
        filters (dict[str, list[str]]): The filters to apply, with any of the keys
            included_plugins, excluded_plugins, included_functions, excluded_functions.
            Function names are matched against the fully qualified name. The included
            and excluded variants of the same filter are mutually exclusive.

    Returns:
        list[KernelFunctionMetadata]: The metadata of functions passing the filters.

    Raises:
        ValueError: If both the included and excluded variant of a filter are set.
    """
    if not self.plugins:
        return []
    included_plugins = filters.get("included_plugins", None)
    excluded_plugins = filters.get("excluded_plugins", [])
    included_functions = filters.get("included_functions", None)
    excluded_functions = filters.get("excluded_functions", [])
    if included_plugins and excluded_plugins:
        raise ValueError("Cannot use both included_plugins and excluded_plugins at the same time.")
    if included_functions and excluded_functions:
        raise ValueError("Cannot use both included_functions and excluded_functions at the same time.")

    def plugin_allowed(name: str) -> bool:
        # Exclusion wins; an include-list, when present, must contain the name.
        if name in excluded_plugins:
            return False
        return not included_plugins or name in included_plugins

    def function_allowed(fqn: str) -> bool:
        if fqn in excluded_functions:
            return False
        return not included_functions or fqn in included_functions

    return [
        function.metadata
        for plugin_name, plugin in self.plugins.items()
        if plugin_allowed(plugin_name)
        for function in plugin
        if function_allowed(function.fully_qualified_name)
    ]
# endregion
# region Services
def select_ai_service(
    self, function: "KernelFunction", arguments: KernelArguments
) -> tuple[ALL_SERVICE_TYPES, PromptExecutionSettings]:
    """Use the configured AI service selector to pick a (service, settings) pair for the function.

    Args:
        function (KernelFunction): The function about to be invoked.
        arguments (KernelArguments): The arguments for the invocation.

    Returns:
        tuple[ALL_SERVICE_TYPES, PromptExecutionSettings]: The selected service and
            the execution settings to use with it.
    """
    return self.ai_service_selector.select_ai_service(self, function, arguments)
def get_service(
    self,
    service_id: str | None = None,
    type: Type[ALL_SERVICE_TYPES] | None = None,
) -> "AIServiceClientBase":
    """Get a service by service_id and type.

    Type is optional; when not supplied, no type checks are done. Type should be
    TextCompletionClientBase, ChatCompletionClientBase, EmbeddingGeneratorBase or
    a subclass of one. Multiple types can be checked in one go with
    TextCompletionClientBase | ChatCompletionClientBase.
    If type and service_id are both None, the first service is returned.

    Args:
        service_id (str | None): The service id; when None or "default", the service
            registered as "default" is preferred, otherwise the first (matching) service.
        type (Type[ALL_SERVICE_TYPES] | None): The type of the service; None skips checks.

    Returns:
        AIServiceClientBase: The selected service.

    Raises:
        KernelServiceNotFoundError: If no service matches the id/type, including
            when no services are registered at all.
        ServiceInvalidTypeError: If the service with service_id is not of the given type.
    """
    service: "AIServiceClientBase | None" = None
    if not service_id or service_id == "default":
        if not type:
            if default_service := self.services.get("default"):
                return default_service
            # Fix: previously an empty registry fell through to a bare IndexError.
            if not self.services:
                raise KernelServiceNotFoundError("No services found in the kernel.")
            return list(self.services.values())[0]
        if default_service := self.services.get("default"):
            if isinstance(default_service, type):
                return default_service
        # The default service did not match (or is absent): scan all services.
        for service in self.services.values():
            if isinstance(service, type):
                return service
        raise KernelServiceNotFoundError(f"No service found of type {type}")
    if not (service := self.services.get(service_id)):
        raise KernelServiceNotFoundError(f"Service with service_id '{service_id}' does not exist")
    if type and not isinstance(service, type):
        raise ServiceInvalidTypeError(f"Service with service_id '{service_id}' is not of type {type}")
    return service
def get_services_by_type(self, type: type[ALL_SERVICE_TYPES]) -> dict[str, ALL_SERVICE_TYPES]:
    """Return all registered services that are instances of the given type, keyed by service_id."""
    matching: dict[str, ALL_SERVICE_TYPES] = {}
    for candidate in self.services.values():
        if isinstance(candidate, type):
            matching[candidate.service_id] = candidate  # type: ignore
    return matching
def get_prompt_execution_settings_from_service_id(
    self, service_id: str, type: Type[ALL_SERVICE_TYPES] | None = None
) -> PromptExecutionSettings:
    """Build request settings from the service registered under service_id.

    The settings are instantiated with the service_id and the service's ai_model_id.

    Args:
        service_id (str): The id of the service to build settings for.
        type (Type[ALL_SERVICE_TYPES] | None): Optional type check for the service.

    Returns:
        PromptExecutionSettings: The settings produced by the service.
    """
    selected = self.get_service(service_id, type=type)
    return selected.instantiate_prompt_execution_settings(
        service_id=service_id,
        extension_data={"ai_model_id": selected.ai_model_id},
    )
def add_service(self, service: AIServiceClientBase, overwrite: bool = False) -> None:
    """Register an AI service under its service_id.

    Args:
        service (AIServiceClientBase): The service to register.
        overwrite (bool): When True, an existing service with the same id is replaced.

    Raises:
        KernelFunctionAlreadyExistsError: If the service_id is already registered and
            overwrite is False. NOTE(review): the exception name refers to functions,
            not services — confirm whether a service-specific error was intended.
    """
    if service.service_id in self.services and not overwrite:
        raise KernelFunctionAlreadyExistsError(f"Service with service_id '{service.service_id}' already exists")
    self.services[service.service_id] = service
def remove_service(self, service_id: str) -> None:
    """Delete a single service from the Kernel.

    Args:
        service_id (str): The id of the service to remove.

    Raises:
        KernelServiceNotFoundError: If no service with service_id exists.
    """
    if service_id in self.services:
        del self.services[service_id]
        return
    raise KernelServiceNotFoundError(f"Service with service_id '{service_id}' does not exist")
def remove_all_services(self) -> None:
    """Remove every service from the Kernel's registry.

    Clears the mapping in place; the service objects themselves are not
    destroyed and remain usable by any other holders.
    """
    self.services.clear()
# endregion