Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Bug]: Completion response is wrong when using Tools in Bedrock's Claude 3 streaming #4091

Closed
iwamot opened this issue Jun 9, 2024 · 7 comments · Fixed by #4106
Closed
Assignees
Labels
bug Something isn't working

Comments

@iwamot
Copy link
Contributor

iwamot commented Jun 9, 2024

What happened?

Thank you so much for supporting the Converse API by #4033.

However, this may cause the following problems:

  1. If stream=True, no tool_calls are included.
  2. For Opus, content returns the thought content in the <thinking> tag.

non-streaming.py

# Reproduction: non-streaming tool-call completions against Bedrock Claude 3 models.
import os
from litellm import completion

# JSON-schema parameters for the single weather-lookup tool, hoisted for readability.
_weather_params = {
    "type": "object",
    "properties": {
        "location": {
            "type": "string",
            "description": "The city and state, e.g. San Francisco, CA",
        },
        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
    },
    "required": ["location"],
}

# OpenAI-style tool list with one function entry.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": _weather_params,
        },
    }
]
messages = [{"role": "user", "content": "What's the weather like in Boston today?"}]

models = [
    "bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
    "bedrock/anthropic.claude-3-haiku-20240307-v1:0",
    "bedrock/anthropic.claude-3-opus-20240229-v1:0",
]

# Issue one completion per model and print the full ModelResponse,
# followed by a blank line as a separator.
for model_id in models:
    result = completion(
        model=model_id,
        messages=messages,
        tools=tools,
        tool_choice="auto",
    )
    print(result)
    print()

output

ModelResponse(id='chatcmpl-fe15c747-3cfd-4f2a-b0cc-1d66aafc3452', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content="Okay, let's get the current weather for Boston using the available tool:", role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{"location": "Boston, MA", "unit": "fahrenheit"}', name='get_current_weather'), id='tooluse_v4bh3bNHRa-_aJKbW2N_2A', type='function')]))], created=1717935949, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=271, completion_tokens=92, total_tokens=363))

ModelResponse(id='chatcmpl-333b9501-78b4-420d-af2a-505ed55a7813', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content='', role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{"location": "Boston, MA", "unit": "fahrenheit"}', name='get_current_weather'), id='tooluse_AcEepAtWTL2Q-nFGzIZAjg', type='function')]))], created=1717935950, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=376, completion_tokens=75, total_tokens=451))

ModelResponse(id='chatcmpl-bc707c5f-30b9-4934-91b2-37a8aa357dd9', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content='<thinking>\nTo answer the question about the weather in Boston today, the relevant tool is get_current_weather. \n\nThe get_current_weather tool has the following parameters:\n- location (required): The user provided "Boston" for the location, so we have this value.\n- unit (optional): The user did not specify a unit preference between Celsius and Fahrenheit. Since this is an optional parameter, we don\'t need to ask the user for this and can use the default.\n\nWe have the required location parameter, so we can proceed with calling the get_current_weather tool to get the information needed to answer the question.\n</thinking>', role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{"location": "Boston, MA"}', name='get_current_weather'), id='tooluse_wltwQDBMRvqky6YLNS0ixw', type='function')]))], created=1717935963, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=645, completion_tokens=196, total_tokens=841))

streaming.py

# Reproduction: streaming tool-call completions against Bedrock Claude 3 models,
# reassembled into a single ModelResponse via stream_chunk_builder.
import os
from litellm import completion, stream_chunk_builder

# JSON-schema parameters for the single weather-lookup tool, hoisted for readability.
_weather_params = {
    "type": "object",
    "properties": {
        "location": {
            "type": "string",
            "description": "The city and state, e.g. San Francisco, CA",
        },
        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
    },
    "required": ["location"],
}

# OpenAI-style tool list with one function entry.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": _weather_params,
        },
    }
]
messages = [{"role": "user", "content": "What's the weather like in Boston today?"}]

models = [
    "bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
    "bedrock/anthropic.claude-3-haiku-20240307-v1:0",
    "bedrock/anthropic.claude-3-opus-20240229-v1:0",
]

# For each model: stream the completion, drain every chunk, then rebuild and
# print the aggregated response, followed by a blank line as a separator.
for model_id in models:
    stream = completion(
        model=model_id,
        messages=messages,
        tools=tools,
        tool_choice="auto",
        stream=True,
    )
    chunks = list(stream)
    print(stream_chunk_builder(chunks, messages=messages))
    print()

output

ModelResponse(id='chatcmpl-52b2929c-8058-4eae-a43a-525236233e3a', choices=[Choices(finish_reason='stop', index=0, message=Message(content="Okay, let's get the current weather for Boston.", role='assistant'))], created=1717936147, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=11, prompt_tokens=16, total_tokens=27))

ModelResponse(id='chatcmpl-26ded7f0-d685-4832-9f3c-8ab14cae24c4', choices=[Choices(finish_reason='stop', index=0, message=Message(content='', role='assistant'))], created=1717936149, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=0, prompt_tokens=16, total_tokens=16))

ModelResponse(id='chatcmpl-10420f15-d3ec-4d0b-bee3-7bcddb6304b8', choices=[Choices(finish_reason='stop', index=0, message=Message(content='<thinking>\nTo answer the question about the current weather in Boston, the get_current_weather tool is relevant and can provide the needed information.\n\nThe tool takes two parameters:\n- location (required): The user directly provided "Boston" for the location, so we have this value.\n- unit (optional): The user did not specify a unit preference between Celsius and Fahrenheit. Since this is an optional parameter, we don\'t need to ask the user for this - the tool can use its default unit.\n\nSince we have a value for the required location parameter, we can proceed with calling the get_current_weather tool without needing any additional information from the user.\n</thinking>', role='assistant'))], created=1717936150, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=133, prompt_tokens=16, total_tokens=149))

Relevant log output

No response

Twitter / LinkedIn details

@iwamot / https://www.linkedin.com/in/iwamot/

@iwamot iwamot added the bug Something isn't working label Jun 9, 2024
@krrishdholakia krrishdholakia self-assigned this Jun 10, 2024
@krrishdholakia krrishdholakia changed the title [Bug]: Completion response is wrong when using Tools in Bedrock's Claude 3 [Bug]: Completion response is wrong when using Tools in Bedrock's Claude 3 streaming Jun 10, 2024
@krrishdholakia
Copy link
Contributor

picking this up today - this seems critical

@krrishdholakia
Copy link
Contributor

Able to repro — working on a fix.

@iwamot
Copy link
Contributor Author

iwamot commented Jun 11, 2024

@krrishdholakia Thank you for your quick response.

The behavior regarding bedrock/anthropic.claude-3-haiku-20240307-v1:0 seems to have improved.

For the other two models, message still does not contain tool_calls.

ModelResponse(id='chatcmpl-2f7e4f89-c596-4dda-a682-cee230e33452', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content="Okay, let's get the current weather for Boston using the available tool:", role='assistant'))], created=1718095516, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=15, prompt_tokens=16, total_tokens=31))

ModelResponse(id='chatcmpl-698d4c23-ad2c-481d-a5f1-81456dc49d49', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{"location": "Boston, MA", "unit": "fahrenheit"}', name='get_current_weather'), id='tooluse_WlRC8Eq-Q3mYH3CGKcDNTQ', type='function')]))], created=1718095519, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=15, prompt_tokens=16, total_tokens=31))

ModelResponse(id='chatcmpl-6c8a8f93-7e5f-4137-b94f-1da49d1cdfd3', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content="<thinking>\nTo answer the question about the weather in Boston today, the relevant tool is get_current_weather. \nThe get_current_weather tool has two parameters:\n- location (required): The user provided the location of Boston in the query.\n- unit (optional): The user did not specify a unit. Since this is an optional parameter, we don't need to ask the user for this and can proceed with the default.\n\nNo other tools are needed. We have the required location parameter, so we can proceed with calling the get_current_weather tool.\n</thinking>", role='assistant'))], created=1718095521, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=113, prompt_tokens=16, total_tokens=129))

@krrishdholakia
Copy link
Contributor

Okay, let's get the current weather for Boston using the available tool:

@iwamot looks like sonnet responded with string not function call - can you share the raw response? run with litellm.set_verbose=True

@krrishdholakia
Copy link
Contributor

To answer the question about the weather

Opus also seems to be returning text, not a function call.

You can force their behaviour, @iwamot, by setting tool_choice=required.

@iwamot
Copy link
Contributor Author

iwamot commented Jun 13, 2024

@krrishdholakia Thank you for your concern. I pasted the log below. It would be desirable if tool_choice=auto could be specified.

verbose.log
Request to litellm:
litellm.completion(model='bedrock/anthropic.claude-3-sonnet-20240229-v1:0', messages=[{'role': 'user', 'content': "What's the weather like in Boston today?"}], tools=[{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], tool_choice='auto', stream=True)


self.optional_params: {}
SYNC kwargs[caching]: False; litellm.cache: None; kwargs.get('cache')['no-cache']: False
Final returned optional params: {'stream': True, 'tools': [{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], 'tool_choice': {'auto': {}}}
self.optional_params: {'stream': True, 'tools': [{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], 'tool_choice': {'auto': {}}}


POST Request Sent from LiteLLM:
curl -X POST \
https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-3-sonnet-20240229-v1:0/converse-stream \
-H 'Content-Type: *****' -H 'X-Amz-Date: *****' -H 'X-Amz-Security-Token: FwoGZXIvYXdzEP///////////wEaDJ1NRyZnjUn0RVrAyiKvArQ2SYsKr5u4LFpK5SxzNvwv/f0NYrQ6j+HHTq21FHQfAPKxvDxU+Y881w4ANAx4EBqkMee4TUEGYAffwoTO81FO8GyMHw9pQQLQcOk4+scVm1fQyng+D748bG0SyVbdd6g0Hki0IzMvpKGIiJDJQhnkHTZN7P4jWDvNhhzbT4WZYohcWfgmlanPEGeXSTqsTGcKyK/0tZucmJ6Jhlk9TnYqejwsDhqqWnjoHVdZyEDQNwcg2d1LeqUSsczYwxiJDBb6I1Cqr7NH+pSoBN7dyabgRwimgZsqNKaX3qLztUc7DFqQzONy0+dluTMKzpWimCoCvAlfPdBGOQG/JTdGX9Fu3dgl2Ycu3glUSjG8ukSNK8fZwa7d4ZCdm9ePGI4/ARdXBHx3Ir+Kz5qC6aeEhyjRiqqzBjIqow1Os0Xyqnqp********************************************' -H 'Authorization: AWS4-HMAC-SHA256 Credential=ASIAUHQFY362MTIKRJRW/20240613/us-west-2/bedrock/aws4_request, SignedHeaders=content-type;host;x-amz-date;x-amz-security-token, Signature=b46fbb8ae4fd772e50cd********************************************' -H 'Content-Length: *****' \
-d '{"messages": [{"role": "user", "content": [{"text": "What's the weather like in Boston today?"}]}], "additionalModelRequestFields": {}, "system": [], "inferenceConfig": {}, "toolConfig": {"tools": [{"toolSpec": {"inputSchema": {"json": {"type": "object", "properties": {"location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"}, "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}}, "required": ["location"]}}, "name": "get_current_weather", "description": "Get the current weather in a given location"}}], "toolChoice": {"auto": {}}}}'


RAW RESPONSE:
first stream response received


PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0, 'tool_use': None}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0, 'tool_use': None}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: False
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'Okay', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'Okay', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'Okay'}
self.sent_first_chunk: False
hold - False, model_response_str - Okay
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Okay', role='assistant', function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Okay', role='assistant', function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ','}
self.sent_first_chunk: True
hold - False, model_response_str - ,
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' let', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' let', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' let'}
self.sent_first_chunk: True
hold - False, model_response_str -  let
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' let', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' let', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': "'s", 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': "'s", 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': "'s"}
self.sent_first_chunk: True
hold - False, model_response_str - 's
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content="'s", role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content="'s", role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' get', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' get', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' get'}
self.sent_first_chunk: True
hold - False, model_response_str -  get
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' current'}
self.sent_first_chunk: True
hold - False, model_response_str -  current
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' weather'}
self.sent_first_chunk: True
hold - False, model_response_str -  weather
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' for', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' for', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' for'}
self.sent_first_chunk: True
hold - False, model_response_str -  for
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' for', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' for', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' Boston', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' Boston', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' Boston'}
self.sent_first_chunk: True
hold - False, model_response_str -  Boston
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' using', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' using', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' using'}
self.sent_first_chunk: True
hold - False, model_response_str -  using
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' using', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' using', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258004, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' available', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' available', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' available'}
self.sent_first_chunk: True
hold - False, model_response_str -  available
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' available', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' available', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' tool'}
self.sent_first_chunk: True
hold - False, model_response_str -  tool
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ':', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ':', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ':'}
self.sent_first_chunk: True
hold - False, model_response_str - :
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=':', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=':', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': 'tooluse_MtMfbhBQRVSxDT3jIghCAw', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': 'tooluse_MtMfbhBQRVSxDT3jIghCAw', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': 'tooluse_MtMfbhBQRVSxDT3jIghCAw', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_MtMfbhBQRVSxDT3jIghCAw', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_MtMfbhBQRVSxDT3jIghCAw', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"', name=None), type='function', index=0)]), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"', name=None), type='function', index=0)]), logprobs=None)], created=1718258006, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'location"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'location"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'location"'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='location"', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='location"', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ': "Bosto'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ': "Bosto'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ': "Bosto'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=': "Bosto', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=': "Bosto', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n, MA'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n, MA'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n, MA'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='n, MA', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='n, MA', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '"'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='"', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='"', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ', '}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ', '}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ', '}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=', ', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=', ', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '"unit": "f'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '"unit": "f'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '"unit": "f'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='"unit": "f', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='"unit": "f', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'ahrenhei'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'ahrenhei'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'ahrenhei'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='ahrenhei', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='ahrenhei', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 't"}'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 't"}'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 't"}'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='t"}', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='t"}', name=None), type='function', index=0)]), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': True, 'finish_reason': 'tool_calls', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: tool_calls; response_obj={'text': '', 'tool_use': None, 'is_finished': True, 'finish_reason': 'tool_calls', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[StreamingChoices(finish_reason='tool_calls', index=0, delta=Delta(content=None, role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258007, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Goes into checking if chunk has hidden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-sonnet-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-sonnet-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Async success callbacks: Got a complete streaming response
Looking up model=anthropic.claude-3-sonnet-20240229-v1:0 in model_cost_map
Success: model=anthropic.claude-3-sonnet-20240229-v1:0 in model_cost_map
prompt_tokens=16; completion_tokens=15
Returned custom cost for model=anthropic.claude-3-sonnet-20240229-v1:0 - prompt_tokens_cost_usd_dollar: 4.8e-05, completion_tokens_cost_usd_dollar: 0.000225
final cost: 0.000273; prompt_tokens_cost_usd_dollar: 4.8e-05; completion_tokens_cost_usd_dollar: 0.000225
Logging Details LiteLLM-Success Call: None
success callbacks: []
Goes into checking if chunk has hidden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-sonnet-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-sonnet-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Logging Details LiteLLM-Success Call streaming complete
Looking up model=anthropic.claude-3-sonnet-20240229-v1:0 in model_cost_map
Success: model=anthropic.claude-3-sonnet-20240229-v1:0 in model_cost_map
prompt_tokens=16; completion_tokens=15
Returned custom cost for model=anthropic.claude-3-sonnet-20240229-v1:0 - prompt_tokens_cost_usd_dollar: 4.8e-05, completion_tokens_cost_usd_dollar: 0.000225
final cost: 0.000273; prompt_tokens_cost_usd_dollar: 4.8e-05; completion_tokens_cost_usd_dollar: 0.000225
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': {'inputTokens': 271, 'outputTokens': 77, 'totalTokens': 348}, 'index': 0}; custom_llm_provider: bedrock
Goes into checking if chunk has hidden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-sonnet-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-sonnet-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
ModelResponse(id='chatcmpl-9e83d138-88fb-4702-90aa-85aa5ac25244', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content="Okay, let's get the current weather for Boston using the available tool:", role='assistant'))], created=1718258003, model='anthropic.claude-3-sonnet-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=15, prompt_tokens=16, total_tokens=31))



Request to litellm:
litellm.completion(model='bedrock/anthropic.claude-3-haiku-20240307-v1:0', messages=[{'role': 'user', 'content': "What's the weather like in Boston today?"}], tools=[{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], tool_choice='auto', stream=True)


self.optional_params: {}
SYNC kwargs[caching]: False; litellm.cache: None; kwargs.get('cache')['no-cache']: False
Final returned optional params: {'stream': True, 'tools': [{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], 'tool_choice': {'auto': {}}}
self.optional_params: {'stream': True, 'tools': [{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], 'tool_choice': {'auto': {}}}


POST Request Sent from LiteLLM:
curl -X POST \
https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-3-haiku-20240307-v1:0/converse-stream \
-H 'Content-Type: *****' -H 'X-Amz-Date: *****' -H 'X-Amz-Security-Token: FwoGZXIvYXdzEP///////////wEaDJ1NRyZnjUn0RVrAyiKvArQ2SYsKr5u4LFpK5SxzNvwv/f0NYrQ6j+HHTq21FHQfAPKxvDxU+Y881w4ANAx4EBqkMee4TUEGYAffwoTO81FO8GyMHw9pQQLQcOk4+scVm1fQyng+D748bG0SyVbdd6g0Hki0IzMvpKGIiJDJQhnkHTZN7P4jWDvNhhzbT4WZYohcWfgmlanPEGeXSTqsTGcKyK/0tZucmJ6Jhlk9TnYqejwsDhqqWnjoHVdZyEDQNwcg2d1LeqUSsczYwxiJDBb6I1Cqr7NH+pSoBN7dyabgRwimgZsqNKaX3qLztUc7DFqQzONy0+dluTMKzpWimCoCvAlfPdBGOQG/JTdGX9Fu3dgl2Ycu3glUSjG8ukSNK8fZwa7d4ZCdm9ePGI4/ARdXBHx3Ir+Kz5qC6aeEhyjRiqqzBjIqow1Os0Xyqnqp********************************************' -H 'Authorization: AWS4-HMAC-SHA256 Credential=ASIAUHQFY362MTIKRJRW/20240613/us-west-2/bedrock/aws4_request, SignedHeaders=content-type;host;x-amz-date;x-amz-security-token, Signature=536f4e7d554a009c9af1********************************************' -H 'Content-Length: *****' \
-d '{"messages": [{"role": "user", "content": [{"text": "What's the weather like in Boston today?"}]}], "additionalModelRequestFields": {}, "system": [], "inferenceConfig": {}, "toolConfig": {"tools": [{"toolSpec": {"inputSchema": {"json": {"type": "object", "properties": {"location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"}, "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}}, "required": ["location"]}}, "name": "get_current_weather", "description": "Get the current weather in a given location"}}], "toolChoice": {"auto": {}}}}'


RAW RESPONSE:
first stream response received


PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0, 'tool_use': None}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0, 'tool_use': None}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: False
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': 'tooluse_QQJ3ZQaaR_iDeFDeltevxg', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': 'tooluse_QQJ3ZQaaR_iDeFDeltevxg', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': 'tooluse_QQJ3ZQaaR_iDeFDeltevxg', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}]}
self.sent_first_chunk: False
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role='assistant', function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_QQJ3ZQaaR_iDeFDeltevxg', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role='assistant', function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_QQJ3ZQaaR_iDeFDeltevxg', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"location"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"location"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"location"'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"location"', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"location"', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ': "'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ': "'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ': "'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=': "', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=': "', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'Bosto'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'Bosto'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'Bosto'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='Bosto', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='Bosto', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n, MA"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n, MA"'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n, MA"'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='n, MA"', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='n, MA"', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ', "'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ', "'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ', "'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=', "', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=', "', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'unit": "fa'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'unit": "fa'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'unit": "fa'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='unit": "fa', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='unit": "fa', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'hrenhei'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'hrenhei'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'hrenhei'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='hrenhei', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='hrenhei', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 't"}'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 't"}'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 't"}'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='t"}', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='t"}', name=None), type='function', index=0)]), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': True, 'finish_reason': 'tool_calls', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: tool_calls; response_obj={'text': '', 'tool_use': None, 'is_finished': True, 'finish_reason': 'tool_calls', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[StreamingChoices(finish_reason='tool_calls', index=0, delta=Delta(content=None, role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Goes into checking if chunk has hidden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-haiku-20240307-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-haiku-20240307-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Async success callbacks: Got a complete streaming response
Looking up model=anthropic.claude-3-haiku-20240307-v1:0 in model_cost_map
Success: model=anthropic.claude-3-haiku-20240307-v1:0 in model_cost_map
prompt_tokens=16; completion_tokens=15
Returned custom cost for model=anthropic.claude-3-haiku-20240307-v1:0 - prompt_tokens_cost_usd_dollar: 4e-06, completion_tokens_cost_usd_dollar: 1.8750000000000002e-05
final cost: 2.275e-05; prompt_tokens_cost_usd_dollar: 4e-06; completion_tokens_cost_usd_dollar: 1.8750000000000002e-05
Logging Details LiteLLM-Success Call: None
success callbacks: []
Goes into checking if chunk has hidden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-haiku-20240307-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-haiku-20240307-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Logging Details LiteLLM-Success Call streaming complete
Looking up model=anthropic.claude-3-haiku-20240307-v1:0 in model_cost_map
Success: model=anthropic.claude-3-haiku-20240307-v1:0 in model_cost_map
prompt_tokens=16; completion_tokens=15
Returned custom cost for model=anthropic.claude-3-haiku-20240307-v1:0 - prompt_tokens_cost_usd_dollar: 4e-06, completion_tokens_cost_usd_dollar: 1.8750000000000002e-05
final cost: 2.275e-05; prompt_tokens_cost_usd_dollar: 4e-06; completion_tokens_cost_usd_dollar: 1.8750000000000002e-05
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': {'inputTokens': 376, 'outputTokens': 59, 'totalTokens': 435}, 'index': 0}; custom_llm_provider: bedrock
Goes into checking if chunk has hidden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-haiku-20240307-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-haiku-20240307-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
ModelResponse(id='chatcmpl-0ca028b5-8658-456b-a876-be3ef00c23b6', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{"location": "Boston, MA", "unit": "fahrenheit"}', name='get_current_weather'), id='tooluse_QQJ3ZQaaR_iDeFDeltevxg', type='function')]))], created=1718258008, model='anthropic.claude-3-haiku-20240307-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=15, prompt_tokens=16, total_tokens=31))



Request to litellm:
litellm.completion(model='bedrock/anthropic.claude-3-opus-20240229-v1:0', messages=[{'role': 'user', 'content': "What's the weather like in Boston today?"}], tools=[{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], tool_choice='auto', stream=True)


self.optional_params: {}
SYNC kwargs[caching]: False; litellm.cache: None; kwargs.get('cache')['no-cache']: False
Final returned optional params: {'stream': True, 'tools': [{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], 'tool_choice': {'auto': {}}}
self.optional_params: {'stream': True, 'tools': [{'type': 'function', 'function': {'name': 'get_current_weather', 'description': 'Get the current weather in a given location', 'parameters': {'type': 'object', 'properties': {'location': {'type': 'string', 'description': 'The city and state, e.g. San Francisco, CA'}, 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}}, 'required': ['location']}}}], 'tool_choice': {'auto': {}}}


POST Request Sent from LiteLLM:
curl -X POST \
https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-3-opus-20240229-v1:0/converse-stream \
-H 'Content-Type: *****' -H 'X-Amz-Date: *****' -H 'X-Amz-Security-Token: FwoGZXIvYXdzEP///////////wEaDJ1NRyZnjUn0RVrAyiKvArQ2SYsKr5u4LFpK5SxzNvwv/f0NYrQ6j+HHTq21FHQfAPKxvDxU+Y881w4ANAx4EBqkMee4TUEGYAffwoTO81FO8GyMHw9pQQLQcOk4+scVm1fQyng+D748bG0SyVbdd6g0Hki0IzMvpKGIiJDJQhnkHTZN7P4jWDvNhhzbT4WZYohcWfgmlanPEGeXSTqsTGcKyK/0tZucmJ6Jhlk9TnYqejwsDhqqWnjoHVdZyEDQNwcg2d1LeqUSsczYwxiJDBb6I1Cqr7NH+pSoBN7dyabgRwimgZsqNKaX3qLztUc7DFqQzONy0+dluTMKzpWimCoCvAlfPdBGOQG/JTdGX9Fu3dgl2Ycu3glUSjG8ukSNK8fZwa7d4ZCdm9ePGI4/ARdXBHx3Ir+Kz5qC6aeEhyjRiqqzBjIqow1Os0Xyqnqp********************************************' -H 'Authorization: AWS4-HMAC-SHA256 Credential=ASIAUHQFY362MTIKRJRW/20240613/us-west-2/bedrock/aws4_request, SignedHeaders=content-type;host;x-amz-date;x-amz-security-token, Signature=c74bb1e1f618fcc3c512********************************************' -H 'Content-Length: *****' \
-d '{"messages": [{"role": "user", "content": [{"text": "What's the weather like in Boston today?"}]}], "additionalModelRequestFields": {}, "system": [], "inferenceConfig": {}, "toolConfig": {"tools": [{"toolSpec": {"inputSchema": {"json": {"type": "object", "properties": {"location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"}, "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}}, "required": ["location"]}}, "name": "get_current_weather", "description": "Get the current weather in a given location"}}], "toolChoice": {"auto": {}}}}'


RAW RESPONSE:
first stream response received


PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0, 'tool_use': None}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0, 'tool_use': None}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: False
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '<thinking>\nThe', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '<thinking>\nThe', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '<thinking>\nThe'}
self.sent_first_chunk: False
hold - False, model_response_str - <thinking>
The
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='<thinking>\nThe', role='assistant', function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='<thinking>\nThe', role='assistant', function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' get', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' get', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' get'}
self.sent_first_chunk: True
hold - False, model_response_str -  get
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '_'}
self.sent_first_chunk: True
hold - False, model_response_str - _
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'current'}
self.sent_first_chunk: True
hold - False, model_response_str - current
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '_'}
self.sent_first_chunk: True
hold - False, model_response_str - _
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'weather'}
self.sent_first_chunk: True
hold - False, model_response_str - weather
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' tool'}
self.sent_first_chunk: True
hold - False, model_response_str -  tool
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' seems', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' seems', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' seems'}
self.sent_first_chunk: True
hold - False, model_response_str -  seems
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' seems', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' seems', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' relevant', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' relevant', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' relevant'}
self.sent_first_chunk: True
hold - False, model_response_str -  relevant
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' relevant', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' relevant', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' to', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' to', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' to'}
self.sent_first_chunk: True
hold - False, model_response_str -  to
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' answer', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' answer', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' answer'}
self.sent_first_chunk: True
hold - False, model_response_str -  answer
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' answer', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' answer', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' this', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' this', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' this'}
self.sent_first_chunk: True
hold - False, model_response_str -  this
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' this', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' this', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' question', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' question', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' question'}
self.sent_first_chunk: True
hold - False, model_response_str -  question
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' question', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' question', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ','}
self.sent_first_chunk: True
hold - False, model_response_str - ,
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' as', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' as', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' as'}
self.sent_first_chunk: True
hold - False, model_response_str -  as
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' as', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' as', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' it', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' it', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' it'}
self.sent_first_chunk: True
hold - False, model_response_str -  it
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' it', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' it', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' can', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' can', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' can'}
self.sent_first_chunk: True
hold - False, model_response_str -  can
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' can', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' can', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' provide', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' provide', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' provide'}
self.sent_first_chunk: True
hold - False, model_response_str -  provide
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' provide', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' provide', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258012, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' current'}
self.sent_first_chunk: True
hold - False, model_response_str -  current
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' weather'}
self.sent_first_chunk: True
hold - False, model_response_str -  weather
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' conditions', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' conditions', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' conditions'}
self.sent_first_chunk: True
hold - False, model_response_str -  conditions
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' conditions', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' conditions', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' for', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' for', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' for'}
self.sent_first_chunk: True
hold - False, model_response_str -  for
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' for', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' for', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' a', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' a', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' a'}
self.sent_first_chunk: True
hold - False, model_response_str -  a
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' a', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' a', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' given', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' given', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' given'}
self.sent_first_chunk: True
hold - False, model_response_str -  given
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' given', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' given', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' location'}
self.sent_first_chunk: True
hold - False, model_response_str -  location
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '.'}
self.sent_first_chunk: True
hold - False, model_response_str - .
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '\n\nThe', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '\n\nThe', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '\n\nThe'}
self.sent_first_chunk: True
hold - False, model_response_str - 

The
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n\nThe', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n\nThe', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' tool'}
self.sent_first_chunk: True
hold - False, model_response_str -  tool
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' requires', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' requires', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' requires'}
self.sent_first_chunk: True
hold - False, model_response_str -  requires
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' requires', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' requires', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' a', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' a', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' a'}
self.sent_first_chunk: True
hold - False, model_response_str -  a
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' a', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' a', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' location'}
self.sent_first_chunk: True
hold - False, model_response_str -  location
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258013, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' parameter', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' parameter', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' parameter'}
self.sent_first_chunk: True
hold - False, model_response_str -  parameter
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' parameter', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' parameter', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '.'}
self.sent_first_chunk: True
hold - False, model_response_str - .
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' The', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' The', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' The'}
self.sent_first_chunk: True
hold - False, model_response_str -  The
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' The', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' The', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' user', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' user', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' user'}
self.sent_first_chunk: True
hold - False, model_response_str -  user
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' user', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' user', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' provide', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' provide', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' provide'}
self.sent_first_chunk: True
hold - False, model_response_str -  provide
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' provide', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' provide', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'd "', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'd "', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'd "'}
self.sent_first_chunk: True
hold - False, model_response_str - d "
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d "', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d "', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'Boston', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'Boston', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'Boston'}
self.sent_first_chunk: True
hold - False, model_response_str - Boston
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '"', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '"', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '"'}
self.sent_first_chunk: True
hold - False, model_response_str - "
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='"', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='"', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' in', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' in', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' in'}
self.sent_first_chunk: True
hold - False, model_response_str -  in
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' in', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' in', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' query', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' query', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' query'}
self.sent_first_chunk: True
hold - False, model_response_str -  query
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' query', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' query', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ','}
self.sent_first_chunk: True
hold - False, model_response_str - ,
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' so', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' so', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' so'}
self.sent_first_chunk: True
hold - False, model_response_str -  so
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' so', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' so', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' we', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' we', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' we'}
self.sent_first_chunk: True
hold - False, model_response_str -  we
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' we', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' we', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258014, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' have', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' have', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' have'}
self.sent_first_chunk: True
hold - False, model_response_str -  have
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' have', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' have', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' necessary', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' necessary', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' necessary'}
self.sent_first_chunk: True
hold - False, model_response_str -  necessary
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' necessary', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' necessary', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' location'}
self.sent_first_chunk: True
hold - False, model_response_str -  location
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' information', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' information', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' information'}
self.sent_first_chunk: True
hold - False, model_response_str -  information
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' information', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' information', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '.'}
self.sent_first_chunk: True
hold - False, model_response_str - .
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '\n\nThe', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '\n\nThe', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '\n\nThe'}
self.sent_first_chunk: True
hold - False, model_response_str - 

The
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n\nThe', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n\nThe', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' unit', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' unit', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' unit'}
self.sent_first_chunk: True
hold - False, model_response_str -  unit
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' unit', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' unit', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' parameter', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' parameter', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' parameter'}
self.sent_first_chunk: True
hold - False, model_response_str -  parameter
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' parameter', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' parameter', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258015, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' is', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' is', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' is'}
self.sent_first_chunk: True
hold - False, model_response_str -  is
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' is', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' is', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' optional', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' optional', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' optional'}
self.sent_first_chunk: True
hold - False, model_response_str -  optional
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' optional', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' optional', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ','}
self.sent_first_chunk: True
hold - False, model_response_str - ,
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' so', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' so', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' so'}
self.sent_first_chunk: True
hold - False, model_response_str -  so
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' so', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' so', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' we', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' we', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' we'}
self.sent_first_chunk: True
hold - False, model_response_str -  we
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' we', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' we', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' don', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' don', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' don'}
self.sent_first_chunk: True
hold - False, model_response_str -  don
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' don', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' don', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': "'t", 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': "'t", 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': "'t"}
self.sent_first_chunk: True
hold - False, model_response_str - 't
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content="'t", role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content="'t", role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' nee', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' nee', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' nee'}
self.sent_first_chunk: True
hold - False, model_response_str -  nee
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' nee', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' nee', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'd to', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'd to', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'd to'}
self.sent_first_chunk: True
hold - False, model_response_str - d to
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' specify', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' specify', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' specify'}
self.sent_first_chunk: True
hold - False, model_response_str -  specify
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' specify', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' specify', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' that', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' that', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' that'}
self.sent_first_chunk: True
hold - False, model_response_str -  that
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' that', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' that', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '.', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '.'}
self.sent_first_chunk: True
hold - False, model_response_str - .
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '\n\nBase', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '\n\nBase', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '\n\nBase'}
self.sent_first_chunk: True
hold - False, model_response_str - 

Base
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n\nBase', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n\nBase', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'd on', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'd on', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'd on'}
self.sent_first_chunk: True
hold - False, model_response_str - d on
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d on', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d on', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' provide', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' provide', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' provide'}
self.sent_first_chunk: True
hold - False, model_response_str -  provide
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' provide', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' provide', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'd information', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'd information', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'd information'}
self.sent_first_chunk: True
hold - False, model_response_str - d information
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d information', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d information', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ',', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ','}
self.sent_first_chunk: True
hold - False, model_response_str - ,
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=',', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' we', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' we', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' we'}
self.sent_first_chunk: True
hold - False, model_response_str -  we
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' we', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' we', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' can', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' can', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' can'}
self.sent_first_chunk: True
hold - False, model_response_str -  can
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' can', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' can', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' procee', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' procee', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' procee'}
self.sent_first_chunk: True
hold - False, model_response_str -  procee
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' procee', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' procee', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'd with', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'd with', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'd with'}
self.sent_first_chunk: True
hold - False, model_response_str - d with
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d with', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d with', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' calling', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' calling', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' calling'}
self.sent_first_chunk: True
hold - False, model_response_str -  calling
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' calling', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' calling', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' get', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' get', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' get'}
self.sent_first_chunk: True
hold - False, model_response_str -  get
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258016, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '_'}
self.sent_first_chunk: True
hold - False, model_response_str - _
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'current', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'current'}
self.sent_first_chunk: True
hold - False, model_response_str - current
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '_', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '_'}
self.sent_first_chunk: True
hold - False, model_response_str - _
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'weather', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'weather'}
self.sent_first_chunk: True
hold - False, model_response_str - weather
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' tool', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' tool'}
self.sent_first_chunk: True
hold - False, model_response_str -  tool
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' tool', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' with', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' with', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' with'}
self.sent_first_chunk: True
hold - False, model_response_str -  with
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' with', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' with', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' the', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' the'}
self.sent_first_chunk: True
hold - False, model_response_str -  the
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' location', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' location'}
self.sent_first_chunk: True
hold - False, model_response_str -  location
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' location', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' set', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' set', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' set'}
self.sent_first_chunk: True
hold - False, model_response_str -  set
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' set', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' set', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' to', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' to', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' to'}
self.sent_first_chunk: True
hold - False, model_response_str -  to
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': ' "', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': ' "', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ' "'}
self.sent_first_chunk: True
hold - False, model_response_str -  "
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' "', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' "', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': 'Boston', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': 'Boston', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': 'Boston'}
self.sent_first_chunk: True
hold - False, model_response_str - Boston
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '".', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '".', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '".'}
self.sent_first_chunk: True
hold - False, model_response_str - ".
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='".', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='".', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258017, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '\n</thinking', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '\n</thinking', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '\n</thinking'}
self.sent_first_chunk: True
hold - False, model_response_str - 
</thinking
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n</thinking', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='\n</thinking', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '>', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '>', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '>'}
self.sent_first_chunk: True
hold - False, model_response_str - >
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='>', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='>', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': 'tooluse_Ce5-yxmeSqGZv4Uk5QfPjA', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': 'tooluse_Ce5-yxmeSqGZv4Uk5QfPjA', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': 'tooluse_Ce5-yxmeSqGZv4Uk5QfPjA', 'type': 'function', 'function': {'name': 'get_current_weather', 'arguments': ''}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_Ce5-yxmeSqGZv4Uk5QfPjA', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_Ce5-yxmeSqGZv4Uk5QfPjA', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': ''}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"locat'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"locat'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': '{"locat'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"locat', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"locat', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'io'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'io'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'io'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='io', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='io', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n": "'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n": "'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'n": "'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='n": "', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='n": "', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'Boston"}'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': {'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'Boston"}'}}, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': '', 'tool_calls': [{'id': None, 'type': 'function', 'function': {'name': None, 'arguments': 'Boston"}'}}]}
self.sent_first_chunk: True
hold - False, model_response_str - 
returning model_response: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='Boston"}', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='Boston"}', name=None), type='function', index=0)]), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Logging Details LiteLLM-Success Call: None
success callbacks: []
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}; custom_llm_provider: bedrock
model_response finish reason 3: None; response_obj={'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': None, 'index': 1}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: None
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': True, 'finish_reason': 'tool_calls', 'usage': None, 'index': 0}; custom_llm_provider: bedrock
model_response finish reason 3: tool_calls; response_obj={'text': '', 'tool_use': None, 'is_finished': True, 'finish_reason': 'tool_calls', 'usage': None, 'index': 0}
model_response.choices[0].delta: Delta(content=None, role=None, function_call=None, tool_calls=None); completion_obj: {'content': ''}
self.sent_first_chunk: True
PROCESSED CHUNK POST CHUNK CREATOR: ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[StreamingChoices(finish_reason='tool_calls', index=0, delta=Delta(content=None, role=None, function_call=None, tool_calls=None), logprobs=None)], created=1718258019, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion.chunk', system_fingerprint=None)
Logging Details LiteLLM-Async Success Call
Goes into checking if chunk has hiddden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-opus-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-opus-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Async success callbacks: Got a complete streaming response
Looking up model=anthropic.claude-3-opus-20240229-v1:0 in model_cost_map
Success: model=anthropic.claude-3-opus-20240229-v1:0 in model_cost_map
prompt_tokens=16; completion_tokens=95
Returned custom cost for model=anthropic.claude-3-opus-20240229-v1:0 - prompt_tokens_cost_usd_dollar: 0.00024, completion_tokens_cost_usd_dollar: 0.007124999999999999
final cost: 0.007364999999999999; prompt_tokens_cost_usd_dollar: 0.00024; completion_tokens_cost_usd_dollar: 0.007124999999999999
Logging Details LiteLLM-Success Call: None
success callbacks: []
Goes into checking if chunk has hiddden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-opus-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-opus-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Logging Details LiteLLM-Success Call streaming complete
Looking up model=anthropic.claude-3-opus-20240229-v1:0 in model_cost_map
Success: model=anthropic.claude-3-opus-20240229-v1:0 in model_cost_map
prompt_tokens=16; completion_tokens=95
Returned custom cost for model=anthropic.claude-3-opus-20240229-v1:0 - prompt_tokens_cost_usd_dollar: 0.00024, completion_tokens_cost_usd_dollar: 0.007124999999999999
final cost: 0.007364999999999999; prompt_tokens_cost_usd_dollar: 0.00024; completion_tokens_cost_usd_dollar: 0.007124999999999999
PROCESSED CHUNK PRE CHUNK CREATOR: {'text': '', 'tool_use': None, 'is_finished': False, 'finish_reason': '', 'usage': {'inputTokens': 645, 'outputTokens': 146, 'totalTokens': 791}, 'index': 0}; custom_llm_provider: bedrock
Goes into checking if chunk has hiddden created at param
Chunks have a created at hidden param
Chunks sorted
token_counter messages received: [{'role': 'user', 'content': "What's the weather like in Boston today?"}]
Token Counter - using generic token counter, for model=anthropic.claude-3-opus-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Token Counter - using generic token counter, for model=anthropic.claude-3-opus-20240229-v1:0
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
ModelResponse(id='chatcmpl-c158ec24-5e47-4d6b-b6a9-83b6abc47ff3', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content='<thinking>\nThe get_current_weather tool seems relevant to answer this question, as it can provide the current weather conditions for a given location.\n\nThe tool requires a location parameter. The user provided "Boston" in the query, so we have the necessary location information.\n\nThe unit parameter is optional, so we don\'t need to specify that.\n\nBased on the provided information, we can proceed with calling the get_current_weather tool with the location set to "Boston".\n</thinking>', role='assistant'))], created=1718258011, model='anthropic.claude-3-opus-20240229-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=95, prompt_tokens=16, total_tokens=111))

@iwamot
Copy link
Contributor Author

iwamot commented Jun 26, 2024

@krrishdholakia It appears that stream_chunk_builder omits tool_calls information.

for chunk in chunks:
    print(chunk)
response = litellm.stream_chunk_builder(chunks)
print()
print(response)
output
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='Certainly', role='assistant', function_call=None, tool_calls=None), logprobs=None)], created=1719390703, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='!', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' I', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content="'", role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='d be', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' happy', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' check', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' in', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' Boston', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' for', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' you', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' I', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content="'ll", role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' use', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' the', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' get', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='current', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='_', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='weather', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' function', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' to', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' retrieve', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' this', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' information', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' Let', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' me', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390704, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' ', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='do that', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' for', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' you', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' right', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=' away', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='.', role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390705, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id='tooluse_zTxxCmxmSGin4tOxR39g3w', function=Function(arguments='', name='get_current_weather'), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='{"location', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='": "Boston', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=', ', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='MA"', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=', "unit":', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments=' "', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='fahren', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content='', role=None, function_call=None, tool_calls=[ChatCompletionDeltaToolCall(id=None, function=Function(arguments='heit"}', name=None), type='function', index=0)]), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)
ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[StreamingChoices(finish_reason='tool_calls', index=0, delta=Delta(content=None, role=None, function_call=None, tool_calls=None), logprobs=None)], created=1719390706, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion.chunk', system_fingerprint=None)

ModelResponse(id='chatcmpl-33110b19-c247-4f0c-9078-1b1834fa78ac', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content="Certainly! I'd be happy to check the current weather in Boston for you. I'll use the get_current_weather function to retrieve this information. Let me do that for you right away.", role='assistant'))], created=1719390703, model='anthropic.claude-3-5-sonnet-20240620-v1:0', object='chat.completion', system_fingerprint=None, usage=Usage(completion_tokens=38, prompt_tokens=0, total_tokens=38))

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
bug Something isn't working
Projects
None yet
Development

Successfully merging a pull request may close this issue.

2 participants