diff --git a/dotnetv4/Bedrock-runtime/.gitignore b/dotnetv4/Bedrock-runtime/.gitignore
new file mode 100644
index 00000000000..ba964e2a8e7
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/.gitignore
@@ -0,0 +1,2 @@
+/.vs/
+/Tools/
diff --git a/dotnetv4/Bedrock-runtime/Actions/BedrockRuntimeActions.csproj b/dotnetv4/Bedrock-runtime/Actions/BedrockRuntimeActions.csproj
new file mode 100644
index 00000000000..de3bdff3ad1
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Actions/BedrockRuntimeActions.csproj
@@ -0,0 +1,14 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Actions/HelloBedrockRuntime.cs b/dotnetv4/Bedrock-runtime/Actions/HelloBedrockRuntime.cs
new file mode 100644
index 00000000000..1ee5a0bf9ea
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Actions/HelloBedrockRuntime.cs
@@ -0,0 +1,34 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+namespace BedrockRuntimeActions
+{
+ internal class HelloBedrockRuntime
+ {
+ private static readonly string CLAUDE = "anthropic.claude-v2";
+
+ static async Task Main(string[] args)
+ {
+ await TextToText();
+ }
+
+ private static async Task TextToText()
+ {
+ string prompt = "In one sentence, what is a large-language model?";
+ await Invoke(CLAUDE, prompt);
+ }
+
+ private static async Task Invoke(string modelId, string prompt)
+ {
+ switch (modelId)
+ {
+ case var _ when modelId == CLAUDE:
+ Console.WriteLine(await InvokeModelAsync.InvokeClaudeAsync(prompt));
+ break;
+ default:
+ Console.WriteLine($"Unknown model ID: {modelId}. Valid model IDs are: {CLAUDE}.");
+ break;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Actions/InvokeModelAsync.cs b/dotnetv4/Bedrock-runtime/Actions/InvokeModelAsync.cs
new file mode 100644
index 00000000000..cf27b03b6d6
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Actions/InvokeModelAsync.cs
@@ -0,0 +1,585 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using System.Threading.Channels;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+using Amazon.Runtime.EventStreams;
+using Amazon.Util;
+
+namespace BedrockRuntimeActions
+{
+ public static class InvokeModelAsync
+ {
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Claude]
+
+ /// <summary>
+ /// Asynchronously invokes the Anthropic Claude 2 model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Claude to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Anthropic Claude, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-claude.html
+ /// </remarks>
+ public static async Task<string> InvokeClaudeAsync(string prompt)
+ {
+ string claudeModelId = "anthropic.claude-v2";
+
+ // Claude requires you to enclose the prompt as follows:
+ string enclosedPrompt = "Human: " + prompt + "\n\nAssistant:";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ string payload = new JsonObject()
+ {
+ { "prompt", enclosedPrompt },
+ { "max_tokens_to_sample", 200 },
+ { "temperature", 0.5 },
+ { "stop_sequences", new JsonArray("\n\nHuman:") }
+ }.ToJsonString();
+
+ string generatedText = "";
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = claudeModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ return JsonNode.ParseAsync(response.Body).Result?["completion"]?.GetValue<string>() ?? "";
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return generatedText;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Claude]
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.ClaudeWithResponseStream]
+
+ /// <summary>
+ /// Asynchronously invokes the Anthropic Claude 2 model to run an inference based on the provided input and process the response stream.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Claude to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Anthropic Claude, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-claude.html
+ /// </remarks>
+ public static async IAsyncEnumerable<string> InvokeClaudeWithResponseStreamAsync(string prompt, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ string claudeModelId = "anthropic.claude-v2";
+
+ // Claude requires you to enclose the prompt as follows:
+ string enclosedPrompt = "Human: " + prompt + "\n\nAssistant:";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ string payload = new JsonObject()
+ {
+ { "prompt", enclosedPrompt },
+ { "max_tokens_to_sample", 200 },
+ { "temperature", 0.5 },
+ { "stop_sequences", new JsonArray("\n\nHuman:") }
+ }.ToJsonString();
+
+ InvokeModelWithResponseStreamResponse? response = null;
+
+ try
+ {
+ response = await client.InvokeModelWithResponseStreamAsync(new InvokeModelWithResponseStreamRequest()
+ {
+ ModelId = claudeModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+
+ if (response is not null && response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ // Create a buffer to write the events into, moving the stream from push mode to pull mode.
+ Channel<string> buffer = Channel.CreateUnbounded<string>();
+ bool isStreaming = true;
+
+ response.Body.ChunkReceived += BodyOnChunkReceived;
+ response.Body.StartProcessing();
+
+ while ((!cancellationToken.IsCancellationRequested && isStreaming) || (!cancellationToken.IsCancellationRequested && buffer.Reader.Count > 0))
+ {
+ // pull the completion from the buffer and add it to the IAsyncEnumerable collection
+ yield return await buffer.Reader.ReadAsync(cancellationToken);
+ }
+ response.Body.ChunkReceived -= BodyOnChunkReceived;
+
+ yield break;
+
+ // handle the ChunkReceived events
+ async void BodyOnChunkReceived(object? sender, EventStreamEventReceivedArgs e)
+ {
+ var streamResponse = JsonSerializer.Deserialize<JsonObject>(e.EventStreamEvent.Bytes) ?? throw new NullReferenceException($"Unable to deserialize {nameof(e.EventStreamEvent.Bytes)}");
+
+ if (streamResponse["stop_reason"]?.GetValue<string?>() != null)
+ {
+ isStreaming = false;
+ }
+
+ // write the received completion chunk into the buffer
+ await buffer.Writer.WriteAsync(streamResponse["completion"]?.GetValue<string>() ?? "", cancellationToken);
+ }
+ }
+ else if (response is not null)
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+
+ yield break;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.ClaudeWithResponseStream]
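+
+ // Usage sketch (illustrative addition; the helper name is hypothetical): consume
+ // the streaming method above with await foreach, printing chunks as they arrive.
+ private static async Task StreamClaudeToConsoleAsync(string prompt)
+ {
+ await foreach (var chunk in InvokeClaudeWithResponseStreamAsync(prompt))
+ {
+ Console.Write(chunk);
+ }
+ Console.WriteLine();
+ }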
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Jurassic2]
+
+ /// <summary>
+ /// Asynchronously invokes the AI21 Labs Jurassic-2 model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Jurassic-2 to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for AI21 Labs Jurassic-2, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-jurassic2.html
+ /// </remarks>
+ public static async Task<string> InvokeJurassic2Async(string prompt)
+ {
+ string jurassic2ModelId = "ai21.j2-mid-v1";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ string payload = new JsonObject()
+ {
+ { "prompt", prompt },
+ { "maxTokens", 200 },
+ { "temperature", 0.5 }
+ }.ToJsonString();
+
+ string generatedText = "";
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = jurassic2ModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ return JsonNode.ParseAsync(response.Body)
+ .Result?["completions"]?
+ .AsArray()[0]?["data"]?
+ .AsObject()["text"]?.GetValue<string>() ?? "";
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return generatedText;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Jurassic2]
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Llama2]
+
+ /// <summary>
+ /// Asynchronously invokes the Meta Llama 2 Chat model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Llama 2 to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Meta Llama 2 Chat, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html
+ /// </remarks>
+ public static async Task<string> InvokeLlama2Async(string prompt)
+ {
+ string llama2ModelId = "meta.llama2-13b-chat-v1";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ string payload = new JsonObject()
+ {
+ { "prompt", prompt },
+ { "max_gen_len", 512 },
+ { "temperature", 0.5 },
+ { "top_p", 0.9 }
+ }.ToJsonString();
+
+ string generatedText = "";
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = llama2ModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ return JsonNode.ParseAsync(response.Body)
+ .Result?["generation"]?.GetValue<string>() ?? "";
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return generatedText;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Llama2]
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.TitanTextG1]
+
+ /// <summary>
+ /// Asynchronously invokes the Amazon Titan Text G1 Express model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Amazon Titan Text G1 Express to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Amazon Titan Text G1 Express, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-text.html
+ /// </remarks>
+ public static async Task<string> InvokeTitanTextG1Async(string prompt)
+ {
+ string titanTextG1ModelId = "amazon.titan-text-express-v1";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ string payload = new JsonObject()
+ {
+ { "inputText", prompt },
+ { "textGenerationConfig", new JsonObject()
+ {
+ { "maxTokenCount", 512 },
+ { "temperature", 0f },
+ { "topP", 1f }
+ }
+ }
+ }.ToJsonString();
+
+ string generatedText = "";
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = titanTextG1ModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ var results = JsonNode.ParseAsync(response.Body).Result?["results"]?.AsArray();
+
+ return results is null ? "" : string.Join(" ", results.Select(x => x?["outputText"]?.GetValue<string>()));
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return generatedText;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.TitanTextG1]
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Mistral7B]
+
+ /// <summary>
+ /// Asynchronously invokes the Mistral 7B model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Mistral 7B to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Mistral 7B, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-mistral.html
+ /// </remarks>
+ public static async Task<List<string?>> InvokeMistral7BAsync(string prompt)
+ {
+ string mistralModelId = "mistral.mistral-7b-instruct-v0:2";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USWest2);
+
+ string payload = new JsonObject()
+ {
+ { "prompt", prompt },
+ { "max_tokens", 200 },
+ { "temperature", 0.5 }
+ }.ToJsonString();
+
+ List<string?>? generatedText = null;
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = mistralModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ var results = JsonNode.ParseAsync(response.Body).Result?["outputs"]?.AsArray();
+
+ generatedText = results?.Select(x => x?["text"]?.GetValue<string?>())?.ToList();
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return generatedText ?? [];
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Mistral7B]
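+
+ // Usage sketch (illustrative addition; the helper name is hypothetical): Mistral
+ // can return multiple outputs, so print each entry of the returned list.
+ private static async Task PrintMistralOutputsAsync(string prompt)
+ {
+ foreach (var text in await InvokeMistral7BAsync(prompt))
+ {
+ Console.WriteLine(text);
+ }
+ }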
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Mixtral8x7B]
+
+ /// <summary>
+ /// Asynchronously invokes the Mixtral 8x7B model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that you want Mixtral 8x7B to complete.</param>
+ /// <returns>The inference response from the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Mixtral 8x7B, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-mistral.html
+ /// </remarks>
+ public static async Task<List<string?>> InvokeMixtral8x7BAsync(string prompt)
+ {
+ string mixtralModelId = "mistral.mixtral-8x7b-instruct-v0:1";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USWest2);
+
+ string payload = new JsonObject()
+ {
+ { "prompt", prompt },
+ { "max_tokens", 200 },
+ { "temperature", 0.5 }
+ }.ToJsonString();
+
+ List<string?>? generatedText = null;
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = mixtralModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ var results = JsonNode.ParseAsync(response.Body).Result?["outputs"]?.AsArray();
+
+ generatedText = results?.Select(x => x?["text"]?.GetValue<string?>())?.ToList();
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return generatedText ?? [];
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.Mixtral8x7B]
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.TitanImageGeneratorG1]
+
+ /// <summary>
+ /// Asynchronously invokes the Amazon Titan Image Generator G1 model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that describes the image Amazon Titan Image Generator G1 has to generate.</param>
+ /// <returns>A base-64 encoded image generated by the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Amazon Titan Image Generator G1, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-image.html
+ /// </remarks>
+ public static async Task<string?> InvokeTitanImageGeneratorG1Async(string prompt, int seed)
+ {
+ string titanImageGeneratorG1ModelId = "amazon.titan-image-generator-v1";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ string payload = new JsonObject()
+ {
+ { "taskType", "TEXT_IMAGE" },
+ { "textToImageParams", new JsonObject()
+ {
+ { "text", prompt }
+ }
+ },
+ { "imageGenerationConfig", new JsonObject()
+ {
+ { "numberOfImages", 1 },
+ { "quality", "standard" },
+ { "cfgScale", 8.0f },
+ { "height", 512 },
+ { "width", 512 },
+ { "seed", seed }
+ }
+ }
+ }.ToJsonString();
+
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = titanImageGeneratorG1ModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ var results = JsonNode.ParseAsync(response.Body).Result?["images"]?.AsArray();
+
+ return results?[0]?.GetValue<string>();
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return null;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.TitanImageGeneratorG1]
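+
+ // Usage sketch (illustrative addition; the helper and file names are hypothetical):
+ // decode the base-64 string returned above and save it as a PNG file.
+ private static async Task SaveTitanImageAsync(string prompt, int seed)
+ {
+ string? base64Image = await InvokeTitanImageGeneratorG1Async(prompt, seed);
+ if (base64Image is not null)
+ {
+ File.WriteAllBytes("titan-image.png", Convert.FromBase64String(base64Image));
+ }
+ }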
+
+ // snippet-start:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.StableDiffusionXL]
+
+ /// <summary>
+ /// Asynchronously invokes the Stability.ai Stable Diffusion XL model to run an inference based on the provided input.
+ /// </summary>
+ /// <param name="prompt">The prompt that describes the image Stability.ai Stable Diffusion XL has to generate.</param>
+ /// <returns>A base-64 encoded image generated by the model</returns>
+ /// <remarks>
+ /// The different model providers have individual request and response formats.
+ /// For the format, ranges, and default values for Stability.ai Stable Diffusion XL, refer to:
+ /// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-stability-diffusion.html
+ /// </remarks>
+ public static async Task<string?> InvokeStableDiffusionXLG1Async(string prompt, int seed, string? stylePreset = null)
+ {
+ string stableDiffusionXLModelId = "stability.stable-diffusion-xl";
+
+ AmazonBedrockRuntimeClient client = new(RegionEndpoint.USEast1);
+
+ var jsonPayload = new JsonObject()
+ {
+ { "text_prompts", new JsonArray() {
+ new JsonObject()
+ {
+ { "text", prompt }
+ }
+ }
+ },
+ { "seed", seed }
+ };
+
+ if (!string.IsNullOrEmpty(stylePreset))
+ {
+ jsonPayload.Add("style_preset", stylePreset);
+ }
+
+ string payload = jsonPayload.ToString();
+
+ try
+ {
+ InvokeModelResponse response = await client.InvokeModelAsync(new InvokeModelRequest()
+ {
+ ModelId = stableDiffusionXLModelId,
+ Body = AWSSDKUtils.GenerateMemoryStreamFromString(payload),
+ ContentType = "application/json",
+ Accept = "application/json"
+ });
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ var results = JsonNode.ParseAsync(response.Body).Result?["artifacts"]?.AsArray();
+
+ return results?[0]?["base64"]?.GetValue<string>();
+ }
+ else
+ {
+ Console.WriteLine("InvokeModelAsync failed with status code " + response.HttpStatusCode);
+ }
+ }
+ catch (AmazonBedrockRuntimeException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ return null;
+ }
+
+ // snippet-end:[BedrockRuntime.dotnetv4.BedrockRuntimeActions.InvokeModelAsync.StableDiffusionXL]
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln b/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln
new file mode 100644
index 00000000000..86bfe5a13a5
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/BedrockRuntimeExamples.sln
@@ -0,0 +1,243 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.8.34330.188
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{E2AD9F91-E6D4-412D-A07A-E004042ADF30}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockRuntimeTests", "Tests\BedrockRuntimeTests.csproj", "{6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}"
+ ProjectSection(ProjectDependencies) = postProject
+ {02823466-F5FF-43A2-B70A-EF3482A0CBDD} = {02823466-F5FF-43A2-B70A-EF3482A0CBDD}
+ {0574B2F4-D4BE-4155-902B-BF3D7CE4804E} = {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}
+ {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}
+ {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}
+ {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3D6441FC-0FE8-4D0C-910D-3D9310599C71}
+ {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}
+ {52CDA3F4-F090-4224-978A-5F42388DCF92} = {52CDA3F4-F090-4224-978A-5F42388DCF92}
+ {63984664-8230-40F3-BFF5-7AC4988D7FE7} = {63984664-8230-40F3-BFF5-7AC4988D7FE7}
+ {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA} = {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}
+ {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18} = {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}
+ {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698} = {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698}
+ {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE} = {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}
+ {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2} = {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}
+ {B753CEB9-EA53-4AE1-997E-B7D54A299D58} = {B753CEB9-EA53-4AE1-997E-B7D54A299D58}
+ {BCC66C37-4980-484F-819D-066D2FF2669C} = {BCC66C37-4980-484F-819D-066D2FF2669C}
+ {CDF1A045-0888-418C-8656-2BF5E3348A48} = {CDF1A045-0888-418C-8656-2BF5E3348A48}
+ {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}
+ {D3BA31F5-FF20-4321-9494-3F01439C4F61} = {D3BA31F5-FF20-4321-9494-3F01439C4F61}
+ {D96E9BC2-3143-4F95-835C-5F3AAC414B9C} = {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}
+ {E153B47E-DBBF-4793-A2C8-89792CB8C6EE} = {E153B47E-DBBF-4793-A2C8-89792CB8C6EE}
+ {EFC7D088-EF45-464B-97CD-0BBA486B224A} = {EFC7D088-EF45-464B-97CD-0BBA486B224A}
+ {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785} = {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}
+ {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4} = {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}
+ EndProjectSection
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Models", "Models", "{41B69207-8F29-41BC-9114-78EE740485C8}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Ai21LabsJurassic2", "Ai21LabsJurassic2", "{E88B3BB6-894F-48F7-8D0B-6244F5E2AD29}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnthropicClaude", "AnthropicClaude", "{8BAC2322-AD3C-484A-B51D-8263BC4E6646}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{3F96ECB4-1644-43E8-8643-2CDCF9E679F1}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CohereCommand", "CohereCommand", "{EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MetaLlama", "MetaLlama", "{65504C76-7E32-4A12-A42E-BCDA4FE79BC1}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Mistral", "Mistral", "{BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\Ai21LabsJurassic2\Converse\Converse.csproj", "{E153B47E-DBBF-4793-A2C8-89792CB8C6EE}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\AmazonTitanText\Converse\Converse.csproj", "{D96E9BC2-3143-4F95-835C-5F3AAC414B9C}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\AnthropicClaude\Converse\Converse.csproj", "{8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\CohereCommand\Converse\Converse.csproj", "{CDF1A045-0888-418C-8656-2BF5E3348A48}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\MetaLlama\Converse\Converse.csproj", "{F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Converse", "Models\Mistral\Converse\Converse.csproj", "{9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\AmazonTitanText\ConverseStream\ConverseStream.csproj", "{FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\AnthropicClaude\ConverseStream\ConverseStream.csproj", "{AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\CohereCommand\ConverseStream\ConverseStream.csproj", "{0574B2F4-D4BE-4155-902B-BF3D7CE4804E}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\MetaLlama\ConverseStream\ConverseStream.csproj", "{D3BA31F5-FF20-4321-9494-3F01439C4F61}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverseStream", "Models\Mistral\ConverseStream\ConverseStream.csproj", "{B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\Ai21LabsJurassic2\InvokeModel\InvokeModel.csproj", "{A992A4DD-FEEE-40B6-8EA1-AEA3EF612698}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\AmazonTitanText\InvokeModel\InvokeModel.csproj", "{3D6441FC-0FE8-4D0C-910D-3D9310599C71}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\AnthropicClaude\InvokeModel\InvokeModel.csproj", "{D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModel", "Models\Mistral\InvokeModel\InvokeModel.csproj", "{1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Llama3_InvokeModel", "Models\MetaLlama\Llama3_InvokeModel\Llama3_InvokeModel.csproj", "{B753CEB9-EA53-4AE1-997E-B7D54A299D58}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_InvokeModel", "Models\CohereCommand\Command_InvokeModel\Command_InvokeModel.csproj", "{2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_R_InvokeModel", "Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj", "{BCC66C37-4980-484F-819D-066D2FF2669C}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{52CDA3F4-F090-4224-978A-5F42388DCF92}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_InvokeModelWithResponseStream", "Models\CohereCommand\Command_InvokeModelWithResponseStream\Command_InvokeModelWithResponseStream.csproj", "{63984664-8230-40F3-BFF5-7AC4988D7FE7}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Command_R_InvokeModelWithResponseStream", "Models\CohereCommand\Command_R_InvokeModelWithResponseStream\Command_R_InvokeModelWithResponseStream.csproj", "{02823466-F5FF-43A2-B70A-EF3482A0CBDD}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Llama3_InvokeModelWithResponseStream", "Models\MetaLlama\Llama3_InvokeModelWithResponseStream\Llama3_InvokeModelWithResponseStream.csproj", "{4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Models\Mistral\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{EFC7D088-EF45-464B-97CD-0BBA486B224A}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Models\AnthropicClaude\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{C75F2BBE-7C84-4B01-9836-7279DAE41499}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E153B47E-DBBF-4793-A2C8-89792CB8C6EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E153B47E-DBBF-4793-A2C8-89792CB8C6EE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E153B47E-DBBF-4793-A2C8-89792CB8C6EE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E153B47E-DBBF-4793-A2C8-89792CB8C6EE}.Release|Any CPU.Build.0 = Release|Any CPU
+ {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {D96E9BC2-3143-4F95-835C-5F3AAC414B9C}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA}.Release|Any CPU.Build.0 = Release|Any CPU
+ {CDF1A045-0888-418C-8656-2BF5E3348A48}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {CDF1A045-0888-418C-8656-2BF5E3348A48}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {CDF1A045-0888-418C-8656-2BF5E3348A48}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {CDF1A045-0888-418C-8656-2BF5E3348A48}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE}.Release|Any CPU.Build.0 = Release|Any CPU
+ {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {0574B2F4-D4BE-4155-902B-BF3D7CE4804E}.Release|Any CPU.Build.0 = Release|Any CPU
+ {D3BA31F5-FF20-4321-9494-3F01439C4F61}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {D3BA31F5-FF20-4321-9494-3F01439C4F61}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {D3BA31F5-FF20-4321-9494-3F01439C4F61}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {D3BA31F5-FF20-4321-9494-3F01439C4F61}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698}.Release|Any CPU.Build.0 = Release|Any CPU
+ {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {3D6441FC-0FE8-4D0C-910D-3D9310599C71}.Release|Any CPU.Build.0 = Release|Any CPU
+ {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B753CEB9-EA53-4AE1-997E-B7D54A299D58}.Release|Any CPU.Build.0 = Release|Any CPU
+ {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F}.Release|Any CPU.Build.0 = Release|Any CPU
+ {BCC66C37-4980-484F-819D-066D2FF2669C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {BCC66C37-4980-484F-819D-066D2FF2669C}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {BCC66C37-4980-484F-819D-066D2FF2669C}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {BCC66C37-4980-484F-819D-066D2FF2669C}.Release|Any CPU.Build.0 = Release|Any CPU
+ {52CDA3F4-F090-4224-978A-5F42388DCF92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {52CDA3F4-F090-4224-978A-5F42388DCF92}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {52CDA3F4-F090-4224-978A-5F42388DCF92}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {52CDA3F4-F090-4224-978A-5F42388DCF92}.Release|Any CPU.Build.0 = Release|Any CPU
+ {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {63984664-8230-40F3-BFF5-7AC4988D7FE7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {02823466-F5FF-43A2-B70A-EF3482A0CBDD}.Release|Any CPU.Build.0 = Release|Any CPU
+ {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75}.Release|Any CPU.Build.0 = Release|Any CPU
+ {EFC7D088-EF45-464B-97CD-0BBA486B224A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {EFC7D088-EF45-464B-97CD-0BBA486B224A}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {EFC7D088-EF45-464B-97CD-0BBA486B224A}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {EFC7D088-EF45-464B-97CD-0BBA486B224A}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ {6DF749A5-67E4-4F4E-BA64-A2D3F00E4700} = {E2AD9F91-E6D4-412D-A07A-E004042ADF30}
+ {E88B3BB6-894F-48F7-8D0B-6244F5E2AD29} = {41B69207-8F29-41BC-9114-78EE740485C8}
+ {8BAC2322-AD3C-484A-B51D-8263BC4E6646} = {41B69207-8F29-41BC-9114-78EE740485C8}
+ {3F96ECB4-1644-43E8-8643-2CDCF9E679F1} = {41B69207-8F29-41BC-9114-78EE740485C8}
+ {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C} = {41B69207-8F29-41BC-9114-78EE740485C8}
+ {65504C76-7E32-4A12-A42E-BCDA4FE79BC1} = {41B69207-8F29-41BC-9114-78EE740485C8}
+ {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B} = {41B69207-8F29-41BC-9114-78EE740485C8}
+ {E153B47E-DBBF-4793-A2C8-89792CB8C6EE} = {E88B3BB6-894F-48F7-8D0B-6244F5E2AD29}
+ {D96E9BC2-3143-4F95-835C-5F3AAC414B9C} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
+ {8D40F644-A2EB-46F0-B0A0-C7B7B563E6BA} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
+ {CDF1A045-0888-418C-8656-2BF5E3348A48} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
+ {F6E2F781-D0C6-4912-8E2F-F6C36FDE4785} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
+ {9018AC00-1D39-41DB-9B3F-A0A7FC60AF18} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
+ {FCA6CE8F-531C-4C53-A7C1-3E3028A98CC4} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
+ {AAE5224C-4947-44C2-8BFB-70AF1F91A6EE} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
+ {0574B2F4-D4BE-4155-902B-BF3D7CE4804E} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
+ {D3BA31F5-FF20-4321-9494-3F01439C4F61} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
+ {B6924BBB-9993-44C1-BEF9-DEDEA42A12B2} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
+ {A992A4DD-FEEE-40B6-8EA1-AEA3EF612698} = {E88B3BB6-894F-48F7-8D0B-6244F5E2AD29}
+ {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
+ {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
+ {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
+ {B753CEB9-EA53-4AE1-997E-B7D54A299D58} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
+ {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
+ {BCC66C37-4980-484F-819D-066D2FF2669C} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
+ {52CDA3F4-F090-4224-978A-5F42388DCF92} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1}
+ {63984664-8230-40F3-BFF5-7AC4988D7FE7} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
+ {02823466-F5FF-43A2-B70A-EF3482A0CBDD} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C}
+ {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1}
+ {EFC7D088-EF45-464B-97CD-0BBA486B224A} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B}
+ {C75F2BBE-7C84-4B01-9836-7279DAE41499} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646}
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {E48A5088-1BBB-4A8B-9AB2-CC5CE0482466}
+ EndGlobalSection
+EndGlobal
diff --git a/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.cs
new file mode 100644
index 00000000000..ecdb3f88f0e
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.cs
@@ -0,0 +1,60 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.Converse_Ai21LabsJurassic2]
+// Use the Converse API to send a text message to AI21 Labs Jurassic-2.
+
+using System;
+using System.Collections.Generic;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Jurassic-2 Mid.
+var modelId = "ai21.j2-mid-v1";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseAsync(request);
+
+ // Extract and print the response text.
+ string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.Converse_Ai21LabsJurassic2]
+
+// Create a partial class to make the top-level script testable.
+namespace Ai21LabsJurassic2 { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj
new file mode 100644
index 00000000000..b56610d48bd
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>Ai21LabsJurassic2.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..9e323cd8312
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.cs
@@ -0,0 +1,61 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_Ai21LabsJurassic2]
+// Use the native inference API to send a text message to AI21 Labs Jurassic-2.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Jurassic-2 Mid.
+var modelId = "ai21.j2-mid-v1";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = userMessage,
+ maxTokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["completions"]?[0]?["data"]?["text"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_Ai21LabsJurassic2]
+
+// Create a partial class to make the top-level script testable.
+namespace Ai21LabsJurassic2 { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj
new file mode 100644
index 00000000000..b224d8131fb
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>Ai21LabsJurassic2.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs
new file mode 100644
index 00000000000..0986193cb91
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.cs
@@ -0,0 +1,60 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.Converse_AmazonTitanText]
+// Use the Converse API to send a text message to Amazon Titan Text.
+
+using System;
+using System.Collections.Generic;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Titan Text Premier.
+var modelId = "amazon.titan-text-premier-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseAsync(request);
+
+ // Extract and print the response text.
+ string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.Converse_AmazonTitanText]
+
+// Create a partial class to make the top-level script testable.
+namespace AmazonTitanText { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
new file mode 100644
index 00000000000..54fc07f593e
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AmazonTitanText.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs
new file mode 100644
index 00000000000..80ab82e5b05
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.cs
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.ConverseStream_AmazonTitanText]
+// Use the Converse API to send a text message to Amazon Titan Text
+// and print the response stream.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Titan Text Premier.
+var modelId = "amazon.titan-text-premier-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseStreamRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var chunk in response.Stream.AsEnumerable())
+ {
+ if (chunk is ContentBlockDeltaEvent)
+ {
+ Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
+ }
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.ConverseStream_AmazonTitanText]
+
+// Create a partial class to make the top-level script testable.
+namespace AmazonTitanText { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
new file mode 100644
index 00000000000..6b33c77774e
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AmazonTitanText.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..d5c19059944
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.cs
@@ -0,0 +1,64 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_AmazonTitanText]
+// Use the native inference API to send a text message to Amazon Titan Text.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Titan Text Premier.
+var modelId = "amazon.titan-text-premier-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ inputText = userMessage,
+ textGenerationConfig = new
+ {
+ maxTokenCount = 512,
+ temperature = 0.5
+ }
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["results"]?[0]?["outputText"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_AmazonTitanText]
+
+// Create a partial class to make the top-level script testable.
+namespace AmazonTitanText { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
new file mode 100644
index 00000000000..6b33c77774e
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AmazonTitanText.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..9aa49c27ba8
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -0,0 +1,66 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_AmazonTitanText]
+// Use the native inference API to send a text message to Amazon Titan Text
+// and print the response stream.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Titan Text Premier.
+var modelId = "amazon.titan-text-premier-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ inputText = userMessage,
+ textGenerationConfig = new
+ {
+ maxTokenCount = 512,
+ temperature = 0.5
+ }
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelWithResponseStreamRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var item in streamingResponse.Body)
+ {
+ var chunk = JsonSerializer.Deserialize<JsonObject>((item as PayloadPart).Bytes);
+ var text = chunk["outputText"] ?? "";
+ Console.Write(text);
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_AmazonTitanText]
+
+// Create a partial class to make the top-level script testable.
+namespace AmazonTitanText { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
new file mode 100644
index 00000000000..6b33c77774e
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AmazonTitanText.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.cs
new file mode 100644
index 00000000000..51abb3a564a
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.cs
@@ -0,0 +1,63 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.Converse_AnthropicClaude]
+// Use the Converse API to send a text message to Anthropic Claude.
+
+using System;
+using System.Collections.Generic;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Claude 3 Haiku.
+var modelId = "anthropic.claude-3-haiku-20240307-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseAsync(request);
+
+ // Extract and print the response text.
+ string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.Converse_AnthropicClaude]
+
+// Create a partial class to make the top-level script testable.
+namespace AnthropicClaude { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
new file mode 100644
index 00000000000..8da69b86bc4
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AnthropicClaude.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.cs b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.cs
new file mode 100644
index 00000000000..64f7dae9241
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.cs
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.ConverseStream_AnthropicClaude]
+// Use the Converse API to send a text message to Anthropic Claude
+// and print the response stream.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Claude 3 Haiku.
+var modelId = "anthropic.claude-3-haiku-20240307-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseStreamRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var chunk in response.Stream.AsEnumerable())
+ {
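+ // Only content block delta events contain generated text; other stream events are skipped.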
+ if (chunk is ContentBlockDeltaEvent)
+ {
+ Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
+ }
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.ConverseStream_AnthropicClaude]
+
+// Create a partial class to make the top-level script testable.
+namespace AnthropicClaude { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
new file mode 100644
index 00000000000..81ab5d08f39
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AnthropicClaude.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..808267aa4cc
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.cs
@@ -0,0 +1,65 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_AnthropicClaude]
+// Use the native inference API to send a text message to Anthropic Claude.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Claude 3 Haiku.
+var modelId = "anthropic.claude-3-haiku-20240307-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ anthropic_version = "bedrock-2023-05-31",
+ max_tokens = 512,
+ temperature = 0.5,
+ messages = new[]
+ {
+ new { role = "user", content = userMessage }
+ }
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["content"]?[0]?["text"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_AnthropicClaude]
+
+// Create a partial class to make the top-level script testable.
+namespace AnthropicClaude { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
new file mode 100644
index 00000000000..81ab5d08f39
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AnthropicClaude.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..64143af3888
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_AnthropicClaude]
+// Use the native inference API to send a text message to Anthropic Claude
+// and print the response stream.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Claude 3 Haiku.
+var modelId = "anthropic.claude-3-haiku-20240307-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ anthropic_version = "bedrock-2023-05-31",
+ max_tokens = 512,
+ temperature = 0.5,
+ messages = new[]
+ {
+ new { role = "user", content = userMessage }
+ }
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelWithResponseStreamRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var item in streamingResponse.Body)
+ {
+ var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
+ var text = chunk["delta"]?["text"] ?? "";
+ Console.Write(text);
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_AnthropicClaude]
+
+// Create a partial class to make the top-level script testable.
+namespace AnthropicClaude { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
new file mode 100644
index 00000000000..49a65a040be
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>AnthropicClaude.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
new file mode 100644
index 00000000000..058b325f013
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>CohereCommand.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..e0b6b9399e7
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/InvokeModel.cs
@@ -0,0 +1,61 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_CohereCommand]
+// Use the native inference API to send a text message to Cohere Command.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Command Light.
+var modelId = "cohere.command-light-text-v14";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = userMessage,
+ max_tokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["generations"]?[0]?["text"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_CohereCommand]
+
+// Create a partial class to make the top-level script testable.
+namespace CohereCommand { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
new file mode 100644
index 00000000000..058b325f013
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>CohereCommand.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..5562ff5e7e2
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -0,0 +1,63 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_CohereCommand]
+// Use the native inference API to send a text message to Cohere Command
+// and print the response stream.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Command Light.
+var modelId = "cohere.command-light-text-v14";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = userMessage,
+ max_tokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelWithResponseStreamRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var item in streamingResponse.Body)
+ {
+ var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
+ var text = chunk["generations"]?[0]?["text"] ?? "";
+ Console.Write(text);
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_CohereCommand]
+
+// Create a partial class to make the top-level script testable.
+namespace CohereCommand { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
new file mode 100644
index 00000000000..058b325f013
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>CohereCommand.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..567b9d859e2
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/InvokeModel.cs
@@ -0,0 +1,61 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_CohereCommandR]
+// Use the native inference API to send a text message to Cohere Command R.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Command R.
+var modelId = "cohere.command-r-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ message = userMessage,
+ max_tokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["text"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_CohereCommandR]
+
+// Create a partial class to make the top-level script testable.
+namespace CohereCommandR { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
new file mode 100644
index 00000000000..058b325f013
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>CohereCommand.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..3cac1fe0ac1
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -0,0 +1,63 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_CohereCommandR]
+// Use the native inference API to send a text message to Cohere Command R
+// and print the response stream.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Command R.
+var modelId = "cohere.command-r-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ message = userMessage,
+ max_tokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelWithResponseStreamRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var item in streamingResponse.Body)
+ {
+ var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
+ var text = chunk["text"] ?? "";
+ Console.Write(text);
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_CohereCommandR]
+
+// Create a partial class to make the top-level script testable.
+namespace CohereCommandR { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.cs
new file mode 100644
index 00000000000..c514dbbd1f9
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.cs
@@ -0,0 +1,60 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.Converse_CohereCommand]
+// Use the Converse API to send a text message to Cohere Command.
+
+using System;
+using System.Collections.Generic;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Command R.
+var modelId = "cohere.command-r-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseAsync(request);
+
+ // Extract and print the response text.
+ string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.Converse_CohereCommand]
+
+// Create a partial class to make the top-level script testable.
+namespace CohereCommand { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
new file mode 100644
index 00000000000..058b325f013
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>CohereCommand.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.cs b/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.cs
new file mode 100644
index 00000000000..fadf5fa91ed
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.cs
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.ConverseStream_CohereCommand]
+// Use the Converse API to send a text message to Cohere Command
+// and print the response stream.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Command R.
+var modelId = "cohere.command-r-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseStreamRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var chunk in response.Stream.AsEnumerable())
+ {
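+ // Only content block delta events contain generated text; other stream events are skipped.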
+ if (chunk is ContentBlockDeltaEvent)
+ {
+ Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
+ }
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.ConverseStream_CohereCommand]
+
+// Create a partial class to make the top-level script testable.
+namespace CohereCommand { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
new file mode 100644
index 00000000000..058b325f013
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>CohereCommand.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.cs
new file mode 100644
index 00000000000..69603bebea3
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.cs
@@ -0,0 +1,60 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.Converse_MetaLlama]
+// Use the Converse API to send a text message to Meta Llama.
+
+using System;
+using System.Collections.Generic;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Llama 3 8b Instruct.
+var modelId = "meta.llama3-8b-instruct-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseAsync(request);
+
+ // Extract and print the response text.
+ string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.Converse_MetaLlama]
+
+// Create a partial class to make the top-level script testable.
+namespace MetaLlama { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
new file mode 100644
index 00000000000..a1950f2ec36
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>MetaLlama.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.cs b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.cs
new file mode 100644
index 00000000000..bb30abdcdbf
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.cs
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.ConverseStream_MetaLlama]
+// Use the Converse API to send a text message to Meta Llama
+// and print the response stream.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Llama 3 8b Instruct.
+var modelId = "meta.llama3-8b-instruct-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseStreamRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var chunk in response.Stream.AsEnumerable())
+ {
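+ // Only content block delta events contain generated text; other stream events are skipped.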
+ if (chunk is ContentBlockDeltaEvent)
+ {
+ Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
+ }
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.ConverseStream_MetaLlama]
+
+// Create a partial class to make the top-level script testable.
+namespace MetaLlama { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
new file mode 100644
index 00000000000..a1950f2ec36
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>MetaLlama.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/Properties/launchSettings.json b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/Properties/launchSettings.json
new file mode 100644
index 00000000000..33504c948ad
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/ConverseStream/Properties/launchSettings.json
@@ -0,0 +1,8 @@
+{
+ "profiles": {
+ "WSL": {
+ "commandName": "WSL2",
+ "distributionName": ""
+ }
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..dc32643f6ca
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs
@@ -0,0 +1,69 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_MetaLlama3]
+// Use the native inference API to send a text message to Meta Llama 3.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USWest2);
+
+// Set the model ID, e.g., Llama 3 70b Instruct.
+var modelId = "meta.llama3-70b-instruct-v1:0";
+
+// Define the prompt for the model.
+var prompt = "Describe the purpose of a 'hello world' program in one line.";
+
+// Embed the prompt in Llama 3's instruction format.
+var formattedPrompt = $@"
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
+{prompt}
+<|eot_id|>
+<|start_header_id|>assistant<|end_header_id|>
+";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = formattedPrompt,
+ max_gen_len = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["generation"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_MetaLlama3]
+
+// Create a partial class to make the top-level script testable.
+namespace MetaLlama3 { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
new file mode 100644
index 00000000000..a1950f2ec36
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>MetaLlama.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..c6435a345db
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -0,0 +1,71 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_MetaLlama3]
+// Use the native inference API to send a text message to Meta Llama 3
+// and print the response stream.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USWest2);
+
+// Set the model ID, e.g., Llama 3 70b Instruct.
+var modelId = "meta.llama3-70b-instruct-v1:0";
+
+// Define the prompt for the model.
+var prompt = "Describe the purpose of a 'hello world' program in one line.";
+
+// Embed the prompt in Llama 3's instruction format.
+var formattedPrompt = $@"
+<|begin_of_text|><|start_header_id|>user<|end_header_id|>
+{prompt}
+<|eot_id|>
+<|start_header_id|>assistant<|end_header_id|>
+";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = formattedPrompt,
+ max_gen_len = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelWithResponseStreamRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var item in streamingResponse.Body)
+ {
+ var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
+ var text = chunk["generation"] ?? "";
+ Console.Write(text);
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_MetaLlama3]
+
+// Create a partial class to make the top-level script testable.
+namespace MetaLlama3 { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj
new file mode 100644
index 00000000000..a1950f2ec36
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>MetaLlama.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.cs b/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.cs
new file mode 100644
index 00000000000..2280639f1a4
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.cs
@@ -0,0 +1,60 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.Converse_Mistral]
+// Use the Converse API to send a text message to Mistral.
+
+using System;
+using System.Collections.Generic;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Mistral Large.
+var modelId = "mistral.mistral-large-2402-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseAsync(request);
+
+ // Extract and print the response text.
+ string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.Converse_Mistral]
+
+// Create a partial class to make the top-level script testable.
+namespace Mistral { public partial class Converse { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj
new file mode 100644
index 00000000000..88c91e1c353
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>Mistral.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.cs b/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.cs
new file mode 100644
index 00000000000..fc3aa00e879
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.cs
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.ConverseStream_Mistral]
+// Use the Converse API to send a text message to Mistral
+// and print the response stream.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Mistral Large.
+var modelId = "mistral.mistral-large-2402-v1:0";
+
+// Define the user message.
+var userMessage = "Describe the purpose of a 'hello world' program in one line.";
+
+// Create a request with the model ID, the user message, and an inference configuration.
+var request = new ConverseStreamRequest
+{
+ ModelId = modelId,
+ Messages = new List<Message>
+ {
+ new Message
+ {
+ Role = ConversationRole.User,
+ Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
+ }
+ },
+ InferenceConfig = new InferenceConfiguration()
+ {
+ MaxTokens = 512,
+ Temperature = 0.5F,
+ TopP = 0.9F
+ }
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the result.
+ var response = await client.ConverseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var chunk in response.Stream.AsEnumerable())
+ {
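+ // Only content block delta events contain generated text; other stream events are skipped.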
+ if (chunk is ContentBlockDeltaEvent)
+ {
+ Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text);
+ }
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.ConverseStream_Mistral]
+
+// Create a partial class to make the top-level script testable.
+namespace Mistral { public partial class ConverseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj
new file mode 100644
index 00000000000..c11471e4a6c
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>Mistral.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.cs b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.cs
new file mode 100644
index 00000000000..ea8f2af4450
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.cs
@@ -0,0 +1,64 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModel_Mistral]
+// Use the native inference API to send a text message to Mistral.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Mistral Large.
+var modelId = "mistral.mistral-large-2402-v1:0";
+
+// Define the prompt for the model.
+var prompt = "Describe the purpose of a 'hello world' program in one line.";
+
+// Embed the prompt in Mistral's instruction format.
+var formattedPrompt = $"[INST] {prompt} [/INST]";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = formattedPrompt,
+ max_tokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var response = await client.InvokeModelAsync(request);
+
+ // Decode the response body.
+ var modelResponse = await JsonNode.ParseAsync(response.Body);
+
+ // Extract and print the response text.
+ var responseText = modelResponse["outputs"]?[0]?["text"] ?? "";
+ Console.WriteLine(responseText);
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModel_Mistral]
+
+// Create a partial class to make the top-level script testable.
+namespace Mistral { public partial class InvokeModel { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj
new file mode 100644
index 00000000000..c11471e4a6c
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>Mistral.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..e6966b7d7d2
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs
@@ -0,0 +1,66 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_Mistral]
+// Use the native inference API to send a text message to Mistral
+// and print the response stream.
+
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Amazon;
+using Amazon.BedrockRuntime;
+using Amazon.BedrockRuntime.Model;
+
+// Create a Bedrock Runtime client in the AWS Region you want to use.
+var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);
+
+// Set the model ID, e.g., Mistral Large.
+var modelId = "mistral.mistral-large-2402-v1:0";
+
+// Define the prompt for the model.
+var prompt = "Describe the purpose of a 'hello world' program in one line.";
+
+// Embed the prompt in Mistral's instruction format.
+var formattedPrompt = $"[INST] {prompt} [/INST]";
+
+// Format the request payload using the model's native structure.
+var nativeRequest = JsonSerializer.Serialize(new
+{
+ prompt = formattedPrompt,
+ max_tokens = 512,
+ temperature = 0.5
+});
+
+// Create a request with the model ID and the model's native request payload.
+var request = new InvokeModelWithResponseStreamRequest()
+{
+ ModelId = modelId,
+ Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)),
+ ContentType = "application/json"
+};
+
+try
+{
+ // Send the request to the Bedrock Runtime and wait for the response.
+ var streamingResponse = await client.InvokeModelWithResponseStreamAsync(request);
+
+ // Extract and print the streamed response text in real-time.
+ foreach (var item in streamingResponse.Body)
+ {
+ var chunk = JsonSerializer.Deserialize<JsonNode>((item as PayloadPart).Bytes);
+ var text = chunk["outputs"]?[0]?["text"] ?? "";
+ Console.Write(text);
+ }
+}
+catch (AmazonBedrockRuntimeException e)
+{
+ Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
+ throw;
+}
+
+// snippet-end:[BedrockRuntime.dotnetv4.InvokeModelWithResponseStream_Mistral]
+
+// Create a partial class to make the top-level script testable.
+namespace Mistral { public partial class InvokeModelWithResponseStream { } }
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
new file mode 100644
index 00000000000..c11471e4a6c
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>Mistral.$(MSBuildProjectName)</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.BedrockRuntime" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock-runtime/README.md b/dotnetv4/Bedrock-runtime/README.md
new file mode 100644
index 00000000000..f013c458260
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/README.md
@@ -0,0 +1,132 @@
+# Amazon Bedrock Runtime code examples for the SDK for .NET
+
+## Overview
+
+Shows how to use the AWS SDK for .NET to work with Amazon Bedrock Runtime.
+
+
+
+
+_Amazon Bedrock Runtime is a fully managed service that makes it easy to use foundation models from third-party providers and Amazon._
+
+## ⚠ Important
+
+* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/).
+* Running the tests might result in charges to your AWS account.
+* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege).
+* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services).
+
+
+
+
+## Code examples
+
+### Prerequisites
+
+For prerequisites, see the [README](../README.md#Prerequisites) in the `dotnetv4` folder.
+
+
+
+
+### AI21 Labs Jurassic-2
+
+- [Converse](Models/Ai21LabsJurassic2/Converse/Converse.cs#L4)
+- [InvokeModel](Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.cs#L4)
+
+### Amazon Titan Text
+
+- [Converse](Models/AmazonTitanText/Converse/Converse.cs#L4)
+- [ConverseStream](Models/AmazonTitanText/ConverseStream/ConverseStream.cs#L4)
+- [InvokeModel](Models/AmazonTitanText/InvokeModel/InvokeModel.cs#L4)
+- [InvokeModelWithResponseStream](Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+
+### Anthropic Claude
+
+- [Converse](Models/AnthropicClaude/Converse/Converse.cs#L4)
+- [ConverseStream](Models/AnthropicClaude/ConverseStream/ConverseStream.cs#L4)
+- [InvokeModel](Models/AnthropicClaude/InvokeModel/InvokeModel.cs#L4)
+- [InvokeModelWithResponseStream](Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+
+### Cohere Command
+
+- [Converse](Models/CohereCommand/Converse/Converse.cs#L4)
+- [ConverseStream](Models/CohereCommand/ConverseStream/ConverseStream.cs#L4)
+- [InvokeModel: Command R and R+](Models/CohereCommand/Command_R_InvokeModel/InvokeModel.cs#L4)
+- [InvokeModel: Command and Command Light](Models/CohereCommand/Command_InvokeModel/InvokeModel.cs#L4)
+- [InvokeModelWithResponseStream: Command R and R+](Models/CohereCommand/Command_R_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+- [InvokeModelWithResponseStream: Command and Command Light](Models/CohereCommand/Command_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+
+### Meta Llama
+
+- [Converse](Models/MetaLlama/Converse/Converse.cs#L4)
+- [ConverseStream](Models/MetaLlama/ConverseStream/ConverseStream.cs#L4)
+- [InvokeModel: Llama 2](Models/MetaLlama/Llama2_InvokeModel/InvokeModel.cs#L4)
+- [InvokeModel: Llama 3](Models/MetaLlama/Llama3_InvokeModel/InvokeModel.cs#L4)
+- [InvokeModelWithResponseStream: Llama 2](Models/MetaLlama/Llama2_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+- [InvokeModelWithResponseStream: Llama 3](Models/MetaLlama/Llama3_InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+
+### Mistral AI
+
+- [Converse](Models/Mistral/Converse/Converse.cs#L4)
+- [ConverseStream](Models/Mistral/ConverseStream/ConverseStream.cs#L4)
+- [InvokeModel](Models/Mistral/InvokeModel/InvokeModel.cs#L4)
+- [InvokeModelWithResponseStream](Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.cs#L4)
+
+
+
+
+
+## Run the examples
+
+### Instructions
+
+For general instructions to run the examples, see the
+[README](../README.md#building-and-running-the-code-examples) in the `dotnetv4` folder.
+
+Some projects might include a settings.json file. Before compiling the project,
+you can change the values in this file to match your own account and resources.
+Alternatively, add a settings.local.json file with your local settings, which will
+be loaded automatically when the application runs.
+
+After the example compiles, you can run it from the command line. To do so, navigate to
+the folder that contains the .csproj file and run the following command:
+
+```
+dotnet run
+```
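+
+For example, to run the Anthropic Claude Converse example:
+
+```
+cd Models/AnthropicClaude/Converse
+dotnet run
+```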
+
+Alternatively, you can run the example from within your IDE.
+
+
+
+
+
+
+
+### Tests
+
+⚠ Running tests might result in charges to your AWS account.
+
+
+To find instructions for running these tests, see the [README](../README.md#Tests)
+in the `dotnetv4` folder.
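+
+The tests are marked with an xUnit `Category` trait set to `Integration`, so you
+can run only these tests with a test filter:
+
+```
+dotnet test --filter "Category=Integration"
+```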
+
+
+
+
+
+
+## Additional resources
+
+- [Amazon Bedrock Runtime User Guide](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html)
+- [Amazon Bedrock Runtime API Reference](https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html)
+- [SDK for .NET Amazon Bedrock Runtime reference](https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/Bedrock-runtime/NBedrock-runtime.html)
+
+
+
+
+---
+
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs
new file mode 100644
index 00000000000..d57db96634e
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Tests/ActionTest_Converse.cs
@@ -0,0 +1,21 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+namespace BedrockRuntimeTests;
+
+public class ActionTest_Converse
+{
+ [Theory, Trait("Category", "Integration")]
+ [InlineData(typeof(Mistral.Converse))]
+ [InlineData(typeof(MetaLlama.Converse))]
+ [InlineData(typeof(CohereCommand.Converse))]
+ [InlineData(typeof(AnthropicClaude.Converse))]
+ [InlineData(typeof(AmazonTitanText.Converse))]
+ [InlineData(typeof(Ai21LabsJurassic2.Converse))]
+ public void ConverseDoesNotThrow(Type type)
+ {
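+ // Each example project is a console app, so invoking the assembly's
+ // entry point runs the example's Main method with an empty argument list.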
+ var entryPoint = type.Assembly.EntryPoint!;
+ var exception = Record.Exception(() => entryPoint.Invoke(null, [Array.Empty<string>()]));
+ Assert.Null(exception);
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs
new file mode 100644
index 00000000000..3c4ab3417f0
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs
@@ -0,0 +1,20 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+namespace BedrockRuntimeTests;
+
+public class ActionTest_ConverseStream
+{
+ [Theory, Trait("Category", "Integration")]
+ [InlineData(typeof(Mistral.ConverseStream))]
+ [InlineData(typeof(MetaLlama.ConverseStream))]
+ [InlineData(typeof(CohereCommand.ConverseStream))]
+ [InlineData(typeof(AnthropicClaude.ConverseStream))]
+ [InlineData(typeof(AmazonTitanText.ConverseStream))]
+ public void ConverseStreamDoesNotThrow(Type type)
+ {
+ var entryPoint = type.Assembly.EntryPoint!;
+ var exception = Record.Exception(() => entryPoint.Invoke(null, [Array.Empty<string>()]));
+ Assert.Null(exception);
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs
new file mode 100644
index 00000000000..c0520fa0d25
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Tests/ActionTest_InvokeModelWithResponseStream.cs
@@ -0,0 +1,22 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+namespace BedrockRuntimeTests
+{
+ public class ActionTest_InvokeModelWithResponseStream
+ {
+ [Theory, Trait("Category", "Integration")]
+ [InlineData(typeof(Mistral.InvokeModelWithResponseStream))]
+ [InlineData(typeof(MetaLlama3.InvokeModelWithResponseStream))]
+ [InlineData(typeof(CohereCommand.InvokeModelWithResponseStream))]
+ [InlineData(typeof(CohereCommandR.InvokeModelWithResponseStream))]
+ [InlineData(typeof(AnthropicClaude.InvokeModelWithResponseStream))]
+ [InlineData(typeof(AmazonTitanText.InvokeModelWithResponseStream))]
+ public void InvokeModelWithResponseStreamDoesNotThrow(Type type)
+ {
+ var entryPoint = type.Assembly.EntryPoint!;
+ var exception = Record.Exception(() => entryPoint.Invoke(null, [Array.Empty<string>()]));
+ Assert.Null(exception);
+ }
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs b/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs
new file mode 100644
index 00000000000..0584cf61793
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs
@@ -0,0 +1,22 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+namespace BedrockRuntimeTests;
+
+public class ActionTest_InvokeModel
+{
+ [Theory, Trait("Category", "Integration")]
+ [InlineData(typeof(Mistral.InvokeModel))]
+ [InlineData(typeof(MetaLlama3.InvokeModel))]
+ [InlineData(typeof(CohereCommand.InvokeModel))]
+ [InlineData(typeof(CohereCommandR.InvokeModel))]
+ [InlineData(typeof(AnthropicClaude.InvokeModel))]
+ [InlineData(typeof(AmazonTitanText.InvokeModel))]
+ [InlineData(typeof(Ai21LabsJurassic2.InvokeModel))]
+ public void InvokeModelDoesNotThrow(Type type)
+ {
+ var entryPoint = type.Assembly.EntryPoint!;
+ var exception = Record.Exception(() => entryPoint.Invoke(null, [Array.Empty<string>()]));
+ Assert.Null(exception);
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj b/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
new file mode 100644
index 00000000000..68e2b8bc617
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj
@@ -0,0 +1,57 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <RootNamespace>BedrockRuntimeTests</RootNamespace>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+
+    <IsPackable>false</IsPackable>
+    <IsTestProject>true</IsTestProject>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- Standard xUnit test packages. -->
+    <PackageReference Include="Microsoft.NET.Test.Sdk" />
+    <PackageReference Include="xunit" />
+    <PackageReference Include="xunit.runner.visualstudio">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+    <PackageReference Include="coverlet.collector">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+  </ItemGroup>
+
+  <ItemGroup>
+    <!-- The example projects under test. -->
+    <ProjectReference Include="..\Models\Ai21LabsJurassic2\Converse\Converse.csproj" />
+    <ProjectReference Include="..\Models\Ai21LabsJurassic2\InvokeModel\InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\AmazonTitanText\Converse\Converse.csproj" />
+    <ProjectReference Include="..\Models\AmazonTitanText\ConverseStream\ConverseStream.csproj" />
+    <ProjectReference Include="..\Models\AmazonTitanText\InvokeModel\InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj" />
+    <ProjectReference Include="..\Models\AnthropicClaude\Converse\Converse.csproj" />
+    <ProjectReference Include="..\Models\AnthropicClaude\ConverseStream\ConverseStream.csproj" />
+    <ProjectReference Include="..\Models\AnthropicClaude\InvokeModel\InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\AnthropicClaude\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj" />
+    <ProjectReference Include="..\Models\CohereCommand\Command_InvokeModel\Command_InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\CohereCommand\Command_InvokeModelWithResponseStream\Command_InvokeModelWithResponseStream.csproj" />
+    <ProjectReference Include="..\Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\CohereCommand\Command_R_InvokeModelWithResponseStream\Command_R_InvokeModelWithResponseStream.csproj" />
+    <ProjectReference Include="..\Models\CohereCommand\Converse\Converse.csproj" />
+    <ProjectReference Include="..\Models\CohereCommand\ConverseStream\ConverseStream.csproj" />
+    <ProjectReference Include="..\Models\MetaLlama\Converse\Converse.csproj" />
+    <ProjectReference Include="..\Models\MetaLlama\ConverseStream\ConverseStream.csproj" />
+    <ProjectReference Include="..\Models\MetaLlama\Llama3_InvokeModel\Llama3_InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\MetaLlama\Llama3_InvokeModelWithResponseStream\Llama3_InvokeModelWithResponseStream.csproj" />
+    <ProjectReference Include="..\Models\Mistral\Converse\Converse.csproj" />
+    <ProjectReference Include="..\Models\Mistral\ConverseStream\ConverseStream.csproj" />
+    <ProjectReference Include="..\Models\Mistral\InvokeModel\InvokeModel.csproj" />
+    <ProjectReference Include="..\Models\Mistral\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj" />
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/dotnetv4/Bedrock-runtime/Tests/GlobalUsings.cs b/dotnetv4/Bedrock-runtime/Tests/GlobalUsings.cs
new file mode 100644
index 00000000000..0f64a5599c7
--- /dev/null
+++ b/dotnetv4/Bedrock-runtime/Tests/GlobalUsings.cs
@@ -0,0 +1,7 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+global using Xunit;
+
+// Optional: run the tests sequentially rather than in parallel.
+[assembly: CollectionBehavior(DisableTestParallelization = true)]
\ No newline at end of file
diff --git a/dotnetv4/Bedrock/Actions/BedrockActions.csproj b/dotnetv4/Bedrock/Actions/BedrockActions.csproj
new file mode 100644
index 00000000000..9f12aa3e3f2
--- /dev/null
+++ b/dotnetv4/Bedrock/Actions/BedrockActions.csproj
@@ -0,0 +1,14 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- AWS SDK for .NET package for Amazon Bedrock. -->
+    <PackageReference Include="AWSSDK.Bedrock" />
+  </ItemGroup>
+
+</Project>
diff --git a/dotnetv4/Bedrock/Actions/HelloBedrock.cs b/dotnetv4/Bedrock/Actions/HelloBedrock.cs
new file mode 100644
index 00000000000..07c46831329
--- /dev/null
+++ b/dotnetv4/Bedrock/Actions/HelloBedrock.cs
@@ -0,0 +1,74 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[Bedrock.dotnetv4.BedrockActions.HelloBedrock]
+using Amazon;
+using Amazon.Bedrock;
+using Amazon.Bedrock.Model;
+
+namespace BedrockActions;
+
+/// <summary>
+/// This example shows how to list foundation models.
+/// </summary>
+internal class HelloBedrock
+{
+ /// <summary>
+ /// Main method to call the ListFoundationModelsAsync method.
+ /// </summary>
+ /// <param name="args">The command line arguments.</param>
+ static async Task Main(string[] args)
+ {
+ // Specify a Region endpoint where Amazon Bedrock is available. For a list of supported Regions, see https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html#bedrock-regions
+ AmazonBedrockClient bedrockClient = new(RegionEndpoint.USWest2);
+
+ await ListFoundationModelsAsync(bedrockClient);
+
+ }
+
+ // snippet-start:[Bedrock.dotnetv4.BedrockActions.ListFoundationModels]
+
+ /// <summary>
+ /// List foundation models.
+ /// </summary>
+ /// <param name="bedrockClient">The Amazon Bedrock client.</param>
+ private static async Task ListFoundationModelsAsync(AmazonBedrockClient bedrockClient)
+ {
+ Console.WriteLine("List foundation models with no filter.");
+
+ try
+ {
+ var response = await bedrockClient.ListFoundationModelsAsync(new ListFoundationModelsRequest()
+ {
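+ // Optionally, set properties on this request to filter the results,
+ // for example by provider or output modality (see the ListFoundationModels
+ // API reference for the available filters).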
+ });
+
+ if (response?.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ foreach (var fm in response.ModelSummaries)
+ {
+ WriteToConsole(fm);
+ }
+ }
+ else
+ {
+ Console.WriteLine("Something wrong happened");
+ }
+ }
+ catch (AmazonBedrockException e)
+ {
+ Console.WriteLine(e.Message);
+ }
+ }
+
+ // snippet-end:[Bedrock.dotnetv4.BedrockActions.ListFoundationModels]
+
+ /// <summary>
+ /// Write the foundation model summary to the console.
+ /// </summary>
+ /// <param name="foundationModel">The foundation model summary to write to the console.</param>
+ private static void WriteToConsole(FoundationModelSummary foundationModel)
+ {
+ Console.WriteLine($"{foundationModel.ModelId}, Customization: {string.Join(", ", foundationModel.CustomizationsSupported)}, Stream: {foundationModel.ResponseStreamingSupported}, Input: {string.Join(", ", foundationModel.InputModalities)}, Output: {string.Join(", ", foundationModel.OutputModalities)}");
+ }
+}
+// snippet-end:[Bedrock.dotnetv4.BedrockActions.HelloBedrock]
\ No newline at end of file
diff --git a/dotnetv4/Bedrock/BedrockExamples.sln b/dotnetv4/Bedrock/BedrockExamples.sln
new file mode 100644
index 00000000000..0c01c36e80a
--- /dev/null
+++ b/dotnetv4/Bedrock/BedrockExamples.sln
@@ -0,0 +1,39 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.8.34309.116
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockActions", "Actions\BedrockActions.csproj", "{C47E3B3E-0040-4CB6-AB92-EF4395C1EB83}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Actions", "Actions", "{0DD1E95E-9EF2-4E43-86B3-F636736BE054}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{898AFE57-24C6-4D79-81C2-614873B38F62}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BedrockTests", "Tests\BedrockTests.csproj", "{5486426B-A8E8-4C6A-BEE2-83DD7CDB68A6}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {C47E3B3E-0040-4CB6-AB92-EF4395C1EB83}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C47E3B3E-0040-4CB6-AB92-EF4395C1EB83}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C47E3B3E-0040-4CB6-AB92-EF4395C1EB83}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C47E3B3E-0040-4CB6-AB92-EF4395C1EB83}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5486426B-A8E8-4C6A-BEE2-83DD7CDB68A6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5486426B-A8E8-4C6A-BEE2-83DD7CDB68A6}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5486426B-A8E8-4C6A-BEE2-83DD7CDB68A6}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5486426B-A8E8-4C6A-BEE2-83DD7CDB68A6}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ {C47E3B3E-0040-4CB6-AB92-EF4395C1EB83} = {0DD1E95E-9EF2-4E43-86B3-F636736BE054}
+ {5486426B-A8E8-4C6A-BEE2-83DD7CDB68A6} = {898AFE57-24C6-4D79-81C2-614873B38F62}
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {3D82D9F6-BE3C-40F1-9224-B8E4D746FC2E}
+ EndGlobalSection
+EndGlobal
diff --git a/dotnetv4/Bedrock/README.md b/dotnetv4/Bedrock/README.md
new file mode 100644
index 00000000000..3a495548dfd
--- /dev/null
+++ b/dotnetv4/Bedrock/README.md
@@ -0,0 +1,104 @@
+# Amazon Bedrock code examples for the SDK for .NET
+
+## Overview
+
+Shows how to use the AWS SDK for .NET to work with Amazon Bedrock.
+
+
+
+
+_Amazon Bedrock enables you to build and scale generative AI applications with foundation models._
+
+## ⚠ Important
+
+* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/).
+* Running the tests might result in charges to your AWS account.
+* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege).
+* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services).
+
+
+
+
+## Code examples
+
+### Prerequisites
+
+For prerequisites, see the [README](../README.md#Prerequisites) in the `dotnetv4` folder.
+
+
+
+
+
+### Get started
+
+- [Hello Amazon Bedrock](Actions/HelloBedrock.cs#L4) (`ListFoundationModels`)
+
+
+### Single actions
+
+Code excerpts that show you how to call individual service functions.
+
+- [ListFoundationModels](Actions/HelloBedrock.cs#L29)
+
+
+
+
+
+## Run the examples
+
+### Instructions
+
+For general instructions to run the examples, see the
+[README](../README.md#building-and-running-the-code-examples) in the `dotnetv4` folder.
+
+Some projects might include a settings.json file. Before compiling the project,
+you can change the values in this file to match your own account and resources.
+Alternatively, add a settings.local.json file with your local settings, which will be
+loaded automatically when the application runs.
+
+After the example compiles, you can run it from the command line. To do so, navigate to
+the folder that contains the .csproj file and run the following command:
+
+```
+dotnet run
+```
+
+Alternatively, you can run the example from within your IDE.
+
+
+
+
+
+#### Hello Amazon Bedrock
+
+This example shows you how to get started using Amazon Bedrock.
+
+
+
+### Tests
+
+⚠ Running tests might result in charges to your AWS account.
+
+
+To find instructions for running these tests, see the [README](../README.md#Tests)
+in the `dotnetv4` folder.
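+
+You can also target a single test with the standard `dotnet test` name filter. For example,
+from the folder that contains the test .csproj file:
+
+```
+dotnet test --filter "FullyQualifiedName~ListFoundationModelsAsync_ShouldNotBeNull"
+```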
+
+
+
+
+
+
+## Additional resources
+
+- [Amazon Bedrock User Guide](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html)
+- [Amazon Bedrock API Reference](https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html)
+- [SDK for .NET Amazon Bedrock reference](https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/Bedrock/NBedrock.html)
+
+
+
+
+---
+
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
\ No newline at end of file
diff --git a/dotnetv4/Bedrock/Tests/BedrockTest.cs b/dotnetv4/Bedrock/Tests/BedrockTest.cs
new file mode 100644
index 00000000000..51061dea432
--- /dev/null
+++ b/dotnetv4/Bedrock/Tests/BedrockTest.cs
@@ -0,0 +1,36 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+using Amazon.Bedrock;
+using Amazon.Bedrock.Model;
+
+namespace BedrockTests;
+
+/// <summary>
+/// Bedrock tests.
+/// </summary>
+public class BedrockTest
+{
+ private readonly AmazonBedrockClient bedrockClient;
+
+ /// <summary>
+ /// Constructor for the test class.
+ /// </summary>
+ public BedrockTest()
+ {
+ bedrockClient = new AmazonBedrockClient(Amazon.RegionEndpoint.USEast1);
+ }
+
+ /// <summary>
+ /// List foundation models. The result should not be empty.
+ /// </summary>
+ /// <returns>Async task.</returns>
+ [Fact]
+ [Trait("Category", "Integration")]
+ [Trait("Category", "Weathertop")]
+ public async Task ListFoundationModelsAsync_ShouldNotBeNull()
+ {
+ var result = await bedrockClient.ListFoundationModelsAsync(new ListFoundationModelsRequest());
+ Assert.NotEmpty(result.ModelSummaries);
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/Bedrock/Tests/BedrockTests.csproj b/dotnetv4/Bedrock/Tests/BedrockTests.csproj
new file mode 100644
index 00000000000..f0acb631a57
--- /dev/null
+++ b/dotnetv4/Bedrock/Tests/BedrockTests.csproj
@@ -0,0 +1,24 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+
+    <IsPackable>false</IsPackable>
+    <IsTestProject>true</IsTestProject>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- AWS SDK for .NET package for Amazon Bedrock, plus standard xUnit test packages. -->
+    <PackageReference Include="AWSSDK.Bedrock" />
+    <PackageReference Include="Microsoft.NET.Test.Sdk" />
+    <PackageReference Include="xunit" />
+    <PackageReference Include="xunit.runner.visualstudio">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+  </ItemGroup>
+
+</Project>
new file mode 100644
index 00000000000..0f64a5599c7
--- /dev/null
+++ b/dotnetv4/Bedrock/Tests/GlobalUsings.cs
@@ -0,0 +1,7 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+global using Xunit;
+
+// Optional: run the tests sequentially rather than in parallel.
+[assembly: CollectionBehavior(DisableTestParallelization = true)]
\ No newline at end of file
diff --git a/dotnetv4/DotNetV4Examples.sln b/dotnetv4/DotNetV4Examples.sln
index b6653665d13..a41933d217f 100644
--- a/dotnetv4/DotNetV4Examples.sln
+++ b/dotnetv4/DotNetV4Examples.sln
@@ -21,6 +21,80 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoScalingBasics", "AutoSc
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoScalingActions", "AutoScaling\Actions\AutoScalingActions.csproj", "{18B07F62-06CC-4562-BB86-2C072758B90F}"
EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Bedrock", "Bedrock", "{1CB557AF-EA75-4505-B50F-FF3AC2A5A1E9}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockTests", "Bedrock\Tests\BedrockTests.csproj", "{F98B4D67-5A92-4D66-9DAA-8334D65E23B1}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockActions", "Bedrock\Actions\BedrockActions.csproj", "{C1A6A3FD-5ADD-4489-92E3-D888F256B74A}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Bedrock-runtime", "Bedrock-runtime", "{D859B39C-9106-4D3D-8C57-11B15FA8106B}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockRuntimeTests", "Bedrock-runtime\Tests\BedrockRuntimeTests.csproj", "{F8B5BC77-F8BF-45E8-8E12-7E197F925772}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Models", "Models", "{4429C078-35C8-4E2B-9C7B-F0C619741B67}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Mistral", "Mistral", "{A9F26F1C-9B8C-4D1B-82FD-35460C47CA2C}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\Mistral\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{37CACA7D-D3BE-42AF-A8C2-639E16C03BC4}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Bedrock-runtime\Models\Mistral\InvokeModel\InvokeModel.csproj", "{7B624438-4340-4333-B2F6-2ADA7A93006C}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\Mistral\ConverseStream\ConverseStream.csproj", "{F60B3806-CC22-470E-80EE-2E480912CE4D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\Mistral\Converse\Converse.csproj", "{E264ABD1-EDC9-4E8E-B828-9CA239792051}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MetaLlama", "MetaLlama", "{AEED729A-51EE-4238-8BF6-751D16A7A755}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Llama3_InvokeModelWithResponseStream", "Bedrock-runtime\Models\MetaLlama\Llama3_InvokeModelWithResponseStream\Llama3_InvokeModelWithResponseStream.csproj", "{44E0145A-684C-466D-8258-171AF9751D95}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Llama3_InvokeModel", "Bedrock-runtime\Models\MetaLlama\Llama3_InvokeModel\Llama3_InvokeModel.csproj", "{112C5C1D-F0A6-4068-A9EB-6047CA1F5CDF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\MetaLlama\ConverseStream\ConverseStream.csproj", "{51F052FC-2DCB-48AF-A4D3-5C42C8C5F713}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\MetaLlama\Converse\Converse.csproj", "{A350D217-DC32-4537-8A9C-167B560CAF75}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CohereCommand", "CohereCommand", "{39EAAA32-53A8-4641-873C-976FD5963360}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\CohereCommand\ConverseStream\ConverseStream.csproj", "{9A433C22-4811-4AD9-99C1-3DF85D9FB54B}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\CohereCommand\Converse\Converse.csproj", "{81EA8494-176C-4178-A1C3-6FA3B1222B74}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_R_InvokeModelWithResponseStream", "Bedrock-runtime\Models\CohereCommand\Command_R_InvokeModelWithResponseStream\Command_R_InvokeModelWithResponseStream.csproj", "{085F3A30-A788-48D6-8067-74D71C29A941}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_R_InvokeModel", "Bedrock-runtime\Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj", "{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_InvokeModelWithResponseStream", "Bedrock-runtime\Models\CohereCommand\Command_InvokeModelWithResponseStream\Command_InvokeModelWithResponseStream.csproj", "{6E0B8FF0-0D03-4424-86D5-CA01437C6814}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_InvokeModel", "Bedrock-runtime\Models\CohereCommand\Command_InvokeModel\Command_InvokeModel.csproj", "{5C91FECD-E8B6-4659-8691-60CA676E8F68}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnthropicClaude", "AnthropicClaude", "{6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\AnthropicClaude\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{345DA0D1-C762-49EF-9953-6F4D57CB7FC7}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Bedrock-runtime\Models\AnthropicClaude\InvokeModel\InvokeModel.csproj", "{C95689B5-C0A1-4C1F-9E97-369D3D397930}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\AnthropicClaude\ConverseStream\ConverseStream.csproj", "{8551C158-60B4-4594-8B1D-5BE851F90EE4}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\AnthropicClaude\Converse\Converse.csproj", "{874C7405-ED8D-477D-9362-0C69CF56F213}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{74979310-8A92-47DC-B5CA-EFA7970E1202}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\AmazonTitanText\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{7FA90AFA-ED17-43CD-94EF-314B43095C10}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Bedrock-runtime\Models\AmazonTitanText\InvokeModel\InvokeModel.csproj", "{18E636A2-A383-487C-AB19-B133B50173F2}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\AmazonTitanText\ConverseStream\ConverseStream.csproj", "{AD2DCA34-3627-43EC-BEE7-7D1104FC521A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\AmazonTitanText\Converse\Converse.csproj", "{3EA8A897-A32D-42B6-B87E-CE269E4597D5}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Ai21LabsJurassic2", "Ai21LabsJurassic2", "{017F0D68-919C-41EF-9E33-087D91BA55CE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Bedrock-runtime\Models\Ai21LabsJurassic2\InvokeModel\InvokeModel.csproj", "{48041B99-B3B8-4970-B9AA-AB2591EA5E55}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\Ai21LabsJurassic2\Converse\Converse.csproj", "{8D8B72F0-E17E-4A85-93B1-D035E5B81A33}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockRuntimeActions", "Bedrock-runtime\Actions\BedrockRuntimeActions.csproj", "{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -51,6 +125,118 @@ Global
{18B07F62-06CC-4562-BB86-2C072758B90F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{18B07F62-06CC-4562-BB86-2C072758B90F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{18B07F62-06CC-4562-BB86-2C072758B90F}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F98B4D67-5A92-4D66-9DAA-8334D65E23B1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F98B4D67-5A92-4D66-9DAA-8334D65E23B1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F98B4D67-5A92-4D66-9DAA-8334D65E23B1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F98B4D67-5A92-4D66-9DAA-8334D65E23B1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C1A6A3FD-5ADD-4489-92E3-D888F256B74A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C1A6A3FD-5ADD-4489-92E3-D888F256B74A}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C1A6A3FD-5ADD-4489-92E3-D888F256B74A}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C1A6A3FD-5ADD-4489-92E3-D888F256B74A}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F8B5BC77-F8BF-45E8-8E12-7E197F925772}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F8B5BC77-F8BF-45E8-8E12-7E197F925772}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F8B5BC77-F8BF-45E8-8E12-7E197F925772}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F8B5BC77-F8BF-45E8-8E12-7E197F925772}.Release|Any CPU.Build.0 = Release|Any CPU
+ {37CACA7D-D3BE-42AF-A8C2-639E16C03BC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {37CACA7D-D3BE-42AF-A8C2-639E16C03BC4}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {37CACA7D-D3BE-42AF-A8C2-639E16C03BC4}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {37CACA7D-D3BE-42AF-A8C2-639E16C03BC4}.Release|Any CPU.Build.0 = Release|Any CPU
+ {7B624438-4340-4333-B2F6-2ADA7A93006C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7B624438-4340-4333-B2F6-2ADA7A93006C}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7B624438-4340-4333-B2F6-2ADA7A93006C}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7B624438-4340-4333-B2F6-2ADA7A93006C}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F60B3806-CC22-470E-80EE-2E480912CE4D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F60B3806-CC22-470E-80EE-2E480912CE4D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F60B3806-CC22-470E-80EE-2E480912CE4D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F60B3806-CC22-470E-80EE-2E480912CE4D}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E264ABD1-EDC9-4E8E-B828-9CA239792051}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E264ABD1-EDC9-4E8E-B828-9CA239792051}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E264ABD1-EDC9-4E8E-B828-9CA239792051}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E264ABD1-EDC9-4E8E-B828-9CA239792051}.Release|Any CPU.Build.0 = Release|Any CPU
+ {44E0145A-684C-466D-8258-171AF9751D95}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {44E0145A-684C-466D-8258-171AF9751D95}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {44E0145A-684C-466D-8258-171AF9751D95}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {44E0145A-684C-466D-8258-171AF9751D95}.Release|Any CPU.Build.0 = Release|Any CPU
+ {112C5C1D-F0A6-4068-A9EB-6047CA1F5CDF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {112C5C1D-F0A6-4068-A9EB-6047CA1F5CDF}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {112C5C1D-F0A6-4068-A9EB-6047CA1F5CDF}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {112C5C1D-F0A6-4068-A9EB-6047CA1F5CDF}.Release|Any CPU.Build.0 = Release|Any CPU
+ {51F052FC-2DCB-48AF-A4D3-5C42C8C5F713}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {51F052FC-2DCB-48AF-A4D3-5C42C8C5F713}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {51F052FC-2DCB-48AF-A4D3-5C42C8C5F713}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {51F052FC-2DCB-48AF-A4D3-5C42C8C5F713}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A350D217-DC32-4537-8A9C-167B560CAF75}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A350D217-DC32-4537-8A9C-167B560CAF75}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A350D217-DC32-4537-8A9C-167B560CAF75}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A350D217-DC32-4537-8A9C-167B560CAF75}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9A433C22-4811-4AD9-99C1-3DF85D9FB54B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9A433C22-4811-4AD9-99C1-3DF85D9FB54B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9A433C22-4811-4AD9-99C1-3DF85D9FB54B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9A433C22-4811-4AD9-99C1-3DF85D9FB54B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {81EA8494-176C-4178-A1C3-6FA3B1222B74}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {81EA8494-176C-4178-A1C3-6FA3B1222B74}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {81EA8494-176C-4178-A1C3-6FA3B1222B74}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {81EA8494-176C-4178-A1C3-6FA3B1222B74}.Release|Any CPU.Build.0 = Release|Any CPU
+ {085F3A30-A788-48D6-8067-74D71C29A941}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {085F3A30-A788-48D6-8067-74D71C29A941}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {085F3A30-A788-48D6-8067-74D71C29A941}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {085F3A30-A788-48D6-8067-74D71C29A941}.Release|Any CPU.Build.0 = Release|Any CPU
+ {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Release|Any CPU.Build.0 = Release|Any CPU
+ {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {6E0B8FF0-0D03-4424-86D5-CA01437C6814}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5C91FECD-E8B6-4659-8691-60CA676E8F68}.Release|Any CPU.Build.0 = Release|Any CPU
+ {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C95689B5-C0A1-4C1F-9E97-369D3D397930}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C95689B5-C0A1-4C1F-9E97-369D3D397930}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C95689B5-C0A1-4C1F-9E97-369D3D397930}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C95689B5-C0A1-4C1F-9E97-369D3D397930}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8551C158-60B4-4594-8B1D-5BE851F90EE4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8551C158-60B4-4594-8B1D-5BE851F90EE4}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8551C158-60B4-4594-8B1D-5BE851F90EE4}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8551C158-60B4-4594-8B1D-5BE851F90EE4}.Release|Any CPU.Build.0 = Release|Any CPU
+ {874C7405-ED8D-477D-9362-0C69CF56F213}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {874C7405-ED8D-477D-9362-0C69CF56F213}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {874C7405-ED8D-477D-9362-0C69CF56F213}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {874C7405-ED8D-477D-9362-0C69CF56F213}.Release|Any CPU.Build.0 = Release|Any CPU
+ {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7FA90AFA-ED17-43CD-94EF-314B43095C10}.Release|Any CPU.Build.0 = Release|Any CPU
+ {18E636A2-A383-487C-AB19-B133B50173F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {18E636A2-A383-487C-AB19-B133B50173F2}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {18E636A2-A383-487C-AB19-B133B50173F2}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {18E636A2-A383-487C-AB19-B133B50173F2}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AD2DCA34-3627-43EC-BEE7-7D1104FC521A}.Release|Any CPU.Build.0 = Release|Any CPU
+ {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {3EA8A897-A32D-42B6-B87E-CE269E4597D5}.Release|Any CPU.Build.0 = Release|Any CPU
+ {48041B99-B3B8-4970-B9AA-AB2591EA5E55}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {48041B99-B3B8-4970-B9AA-AB2591EA5E55}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {48041B99-B3B8-4970-B9AA-AB2591EA5E55}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {48041B99-B3B8-4970-B9AA-AB2591EA5E55}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8D8B72F0-E17E-4A85-93B1-D035E5B81A33}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8D8B72F0-E17E-4A85-93B1-D035E5B81A33}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8D8B72F0-E17E-4A85-93B1-D035E5B81A33}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8D8B72F0-E17E-4A85-93B1-D035E5B81A33}.Release|Any CPU.Build.0 = Release|Any CPU
+ {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -63,6 +249,41 @@ Global
{A0D54C23-C7ED-45D3-92C8-7B267E499CD1} = {F1E506B7-81E3-4E1E-A81E-B810FE690ADE}
{96B016E8-CDB3-490B-A1BB-6A9008E9E30B} = {A0D54C23-C7ED-45D3-92C8-7B267E499CD1}
{18B07F62-06CC-4562-BB86-2C072758B90F} = {F1E506B7-81E3-4E1E-A81E-B810FE690ADE}
+ {F98B4D67-5A92-4D66-9DAA-8334D65E23B1} = {1CB557AF-EA75-4505-B50F-FF3AC2A5A1E9}
+ {C1A6A3FD-5ADD-4489-92E3-D888F256B74A} = {1CB557AF-EA75-4505-B50F-FF3AC2A5A1E9}
+ {F8B5BC77-F8BF-45E8-8E12-7E197F925772} = {D859B39C-9106-4D3D-8C57-11B15FA8106B}
+ {4429C078-35C8-4E2B-9C7B-F0C619741B67} = {D859B39C-9106-4D3D-8C57-11B15FA8106B}
+ {A9F26F1C-9B8C-4D1B-82FD-35460C47CA2C} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
+ {37CACA7D-D3BE-42AF-A8C2-639E16C03BC4} = {A9F26F1C-9B8C-4D1B-82FD-35460C47CA2C}
+ {7B624438-4340-4333-B2F6-2ADA7A93006C} = {A9F26F1C-9B8C-4D1B-82FD-35460C47CA2C}
+ {F60B3806-CC22-470E-80EE-2E480912CE4D} = {A9F26F1C-9B8C-4D1B-82FD-35460C47CA2C}
+ {E264ABD1-EDC9-4E8E-B828-9CA239792051} = {A9F26F1C-9B8C-4D1B-82FD-35460C47CA2C}
+ {AEED729A-51EE-4238-8BF6-751D16A7A755} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
+ {44E0145A-684C-466D-8258-171AF9751D95} = {AEED729A-51EE-4238-8BF6-751D16A7A755}
+ {112C5C1D-F0A6-4068-A9EB-6047CA1F5CDF} = {AEED729A-51EE-4238-8BF6-751D16A7A755}
+ {51F052FC-2DCB-48AF-A4D3-5C42C8C5F713} = {AEED729A-51EE-4238-8BF6-751D16A7A755}
+ {A350D217-DC32-4537-8A9C-167B560CAF75} = {AEED729A-51EE-4238-8BF6-751D16A7A755}
+ {39EAAA32-53A8-4641-873C-976FD5963360} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
+ {9A433C22-4811-4AD9-99C1-3DF85D9FB54B} = {39EAAA32-53A8-4641-873C-976FD5963360}
+ {81EA8494-176C-4178-A1C3-6FA3B1222B74} = {39EAAA32-53A8-4641-873C-976FD5963360}
+ {085F3A30-A788-48D6-8067-74D71C29A941} = {39EAAA32-53A8-4641-873C-976FD5963360}
+ {6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716} = {39EAAA32-53A8-4641-873C-976FD5963360}
+ {6E0B8FF0-0D03-4424-86D5-CA01437C6814} = {39EAAA32-53A8-4641-873C-976FD5963360}
+ {5C91FECD-E8B6-4659-8691-60CA676E8F68} = {39EAAA32-53A8-4641-873C-976FD5963360}
+ {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
+ {345DA0D1-C762-49EF-9953-6F4D57CB7FC7} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
+ {C95689B5-C0A1-4C1F-9E97-369D3D397930} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
+ {8551C158-60B4-4594-8B1D-5BE851F90EE4} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
+ {874C7405-ED8D-477D-9362-0C69CF56F213} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
+ {74979310-8A92-47DC-B5CA-EFA7970E1202} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
+ {7FA90AFA-ED17-43CD-94EF-314B43095C10} = {74979310-8A92-47DC-B5CA-EFA7970E1202}
+ {18E636A2-A383-487C-AB19-B133B50173F2} = {74979310-8A92-47DC-B5CA-EFA7970E1202}
+ {AD2DCA34-3627-43EC-BEE7-7D1104FC521A} = {74979310-8A92-47DC-B5CA-EFA7970E1202}
+ {3EA8A897-A32D-42B6-B87E-CE269E4597D5} = {74979310-8A92-47DC-B5CA-EFA7970E1202}
+ {017F0D68-919C-41EF-9E33-087D91BA55CE} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
+ {48041B99-B3B8-4970-B9AA-AB2591EA5E55} = {017F0D68-919C-41EF-9E33-087D91BA55CE}
+ {8D8B72F0-E17E-4A85-93B1-D035E5B81A33} = {017F0D68-919C-41EF-9E33-087D91BA55CE}
+ {05E93A3E-CFA0-4980-8EE5-CD25C7ED766D} = {D859B39C-9106-4D3D-8C57-11B15FA8106B}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {08502818-E8E1-4A91-A51C-4C8C8D4FF9CA}