
.Net: Bug: VertexAIGeminiChatCompletion 400 bad request when model makes multiple function calls in one message #12527


Description

@Klok-e

Describe the bug
When the model makes multiple function calls in one message, the Gemini API returns the following error:

{
  "error" : {
    "code" : 400,
    "message" : "Please ensure that the number of function response parts is equal to the number of function call parts of the function call turn.",
    "status" : "INVALID_ARGUMENT"
  }
}

The request looks like this (note that the model turn contains two functionCall parts, while the two functionResponse parts are sent back as two separate function-role turns, each with a single part):

{
  "contents" : [ {
    "parts" : [ {
      "text" : "Using File Search find information on semantic kernel. Invoke the function exactly two times."
    } ],
    "role" : "user"
  }, {
    "parts" : [ {
      "functionCall" : {
        "name" : "FileSearch_GetTextSearchResults",
        "args" : {
          "query" : "semantic kernel"
        }
      }
    }, {
      "functionCall" : {
        "name" : "FileSearch_GetTextSearchResults",
        "args" : {
          "count" : 1,
          "query" : "semantic kernel"
        }
      }
    } ],
    "role" : "model"
  }, {
    "parts" : [ {
      "functionResponse" : {
        "name" : "FileSearch_GetTextSearchResults",
        "response" : {
          "content" : [ {
            "Name" : "0",
            "Link" : "file3.txt",
            "Value" : "Semantic kernel stuff 4"
          } ]
        }
      }
    } ],
    "role" : "function"
  }, {
    "parts" : [ {
      "functionResponse" : {
        "name" : "FileSearch_GetTextSearchResults",
        "response" : {
          "content" : [ {
            "Name" : "0",
            "Link" : "file3.txt",
            "Value" : "Semantic kernel stuff 4"
          } ]
        }
      }
    } ],
    "role" : "function"
  } ],
  "generationConfig" : { },
  "tools" : [ {
    "functionDeclarations" : [ {
      "name" : "FileSearch_GetTextSearchResults",
      "description" : "Perform a search for content related to the specified query. The search will return the name, value and link for the related content.",
      "parameters" : {
        "type" : "object",
        "required" : [ "query" ],
        "properties" : {
          "query" : {
            "description" : "What to search for",
            "type" : "string"
          },
          "count" : {
            "description" : "Number of results (default value: 2)",
            "type" : "integer"
          },
          "skip" : {
            "description" : "Number of results to skip (default value: 0)",
            "type" : "integer"
          }
        }
      }
    } ]
  } ]
}
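
Per the error message, Gemini expects the function responses for a parallel function-call turn to arrive together, with one functionResponse part for each functionCall part of the preceding model turn. The request above instead sends each response as its own single-part turn. A minimal sketch of the grouping the API appears to expect (the exact role and shape are assumptions inferred from the error text, not confirmed connector behavior) would merge the last two entries into one:

{
  "parts" : [ {
    "functionResponse" : {
      "name" : "FileSearch_GetTextSearchResults",
      "response" : {
        "content" : [ {
          "Name" : "0",
          "Link" : "file3.txt",
          "Value" : "Semantic kernel stuff 4"
        } ]
      }
    }
  }, {
    "functionResponse" : {
      "name" : "FileSearch_GetTextSearchResults",
      "response" : {
        "content" : [ {
          "Name" : "0",
          "Link" : "file3.txt",
          "Value" : "Semantic kernel stuff 4"
        } ]
      }
    }
  } ],
  "role" : "function"
}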

To Reproduce

#pragma warning disable SKEXP0070
var builder = Kernel.CreateBuilder();

builder = builder
    .AddVertexAIGeminiChatCompletion(
        modelId: options.ModelName,
        bearerKey: accessToken,
        location: options.Location,
        projectId: options.ProjectId
    )
    .AddVertexAIEmbeddingGenerator(
        modelId: options.EmbeddingModel,
        bearerKey: accessToken,
        location: options.Location,
        projectId: options.ProjectId
    )
    .AddVectorStoreTextSearch<TextEmbeddingDataModel>();
#pragma warning restore SKEXP0070

var vectorStore = new InMemoryVectorStore();
        
builder.Services.AddScoped<IEmbeddingGenerator>(x => x.GetRequiredService<IEmbeddingGenerator<string, Embedding<float>>>());
builder.Services.AddSingleton(vectorStore);

builder.Services.AddSingleton<IVectorSearchable<TextEmbeddingDataModel>>(services =>
    vectorStore.GetCollection<string, TextEmbeddingDataModel>(
        "1", new VectorStoreCollectionDefinition { EmbeddingGenerator = services.GetRequiredService<IEmbeddingGenerator<string, Embedding<float>>>(), })
);

var kernel = builder.Build();

var collection = vectorStore.GetCollection<string, TextEmbeddingDataModel>("1", new VectorStoreCollectionDefinition()
{
    EmbeddingGenerator = kernel.GetRequiredService<IEmbeddingGenerator<string, Embedding<float>>>(),
});
await collection.EnsureCollectionExistsAsync();

List<(string, string)> documents =
[
    ("Semantic kernel stuff 1", "file1.txt"),
    ("Semantic kernel stuff 2", "file1.txt"),
    ("Semantic kernel stuff 3", "file2.txt"),
    ("Semantic kernel stuff 4", "file3.txt"),
];

await collection.UpsertAsync(documents.Select((textChunk, i) => new TextEmbeddingDataModel
{
    Key = i.ToString(),
    Text = textChunk.Item1,
    FileName = textChunk.Item2,
}));

#pragma warning disable SKEXP0001
var textSearch = kernel.Services.GetRequiredService<VectorStoreTextSearch<TextEmbeddingDataModel>>();
#pragma warning restore SKEXP0001

kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions(
        "FileSearch",
        "Can search in documents",
        [textSearch.CreateGetTextSearchResults(searchOptions: new TextSearchOptions { Top = 3 }),]
    )
);

#pragma warning disable SKEXP0070

GeminiPromptExecutionSettings settings = new()
{
    ToolCallBehavior = GeminiToolCallBehavior.AutoInvokeKernelFunctions,
};
#pragma warning restore SKEXP0070

var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

var history = new ChatHistory();

history.AddUserMessage("Using File Search find information on semantic kernel. Invoke the function exactly two times.");

var result = await chatCompletionService.GetChatMessageContentsAsync(history, settings, kernel);
Console.WriteLine(result);
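
The snippet above references TextEmbeddingDataModel without defining it. A minimal sketch of a record type that fits the usage above is shown below; the attribute names, the string-typed vector property, and the 768-dimension value are assumptions (they depend on the Microsoft.Extensions.VectorData and Semantic Kernel versions and on the embedding model), not part of the original report:

#pragma warning disable SKEXP0001
// Hypothetical data model matching the Key/Text/FileName usage in the repro.
// Requires Microsoft.Extensions.VectorData and Microsoft.SemanticKernel.Data.
public sealed class TextEmbeddingDataModel
{
    // Record key; surfaced as the "Name" of a text search result.
    [VectorStoreKey]
    [TextSearchResultName]
    public string Key { get; set; } = string.Empty;

    // Chunk text; surfaced as the "Value" of a text search result.
    [VectorStoreData]
    [TextSearchResultValue]
    public string Text { get; set; } = string.Empty;

    // Source file; surfaced as the "Link" of a text search result.
    [VectorStoreData]
    [TextSearchResultLink]
    public string FileName { get; set; } = string.Empty;

    // The vector is generated from Text by the IEmbeddingGenerator configured on
    // the VectorStoreCollectionDefinition; 768 dimensions is an assumed value.
    [VectorStoreVector(768)]
    public string Embedding => this.Text;
}
#pragma warning restore SKEXP0001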

Expected behavior
Gemini accepts the function responses and answers the question.

Platform

  • Language: C#
  • Source: Microsoft.SemanticKernel 1.57.0
  • AI model: gemini-2.5-flash-preview-05-20
  • OS: Windows

Labels

.NET (Issue or Pull requests regarding .NET code), bug (Something isn't working)
