From d7f0336b8b23f599fe4cf163aa9a1d30daf39f58 Mon Sep 17 00:00:00 2001
From: Tommy
Date: Mon, 1 Dec 2025 16:21:26 +0100
Subject: [PATCH] add multipart request support

---
 cachy.jsonl | 2 +
 cachy/_modidx.py | 1 +
 cachy/core.py | 9 +-
 nbs/00_core.ipynb | 244 +++++++++++++++++++++++++++++++++++++++++++---
 4 files changed, 244 insertions(+), 12 deletions(-)

diff --git a/cachy.jsonl b/cachy.jsonl
index 5e3f037..00643d4 100644
--- a/cachy.jsonl
+++ b/cachy.jsonl
@@ -19,3 +19,5 @@
 {"key": "d4142886", "response": "{\n \"candidates\": [\n {\n \"content\": {\n \"parts\": [\n {\n \"text\": \"Concurrency\\n\"\n }\n ],\n \"role\": \"model\"\n },\n \"finishReason\": \"STOP\",\n \"avgLogprobs\": -0.29076665639877319\n }\n ],\n \"usageMetadata\": {\n \"promptTokenCount\": 10,\n \"candidatesTokenCount\": 2,\n \"totalTokenCount\": 12,\n \"promptTokensDetails\": [\n {\n \"modality\": \"TEXT\",\n \"tokenCount\": 10\n }\n ],\n \"candidatesTokensDetails\": [\n {\n \"modality\": \"TEXT\",\n \"tokenCount\": 2\n }\n ]\n },\n \"modelVersion\": \"gemini-2.0-flash\",\n \"responseId\": \"1gvIaLDAGe2kvdIPsY6--Q0\"\n}\n"}
 {"key": "fe23aa62", "response": "data: {\"candidates\": [{\"content\": {\"parts\": [{\"text\": \"Concurrency\"}],\"role\": \"model\"}}],\"usageMetadata\": {\"promptTokenCount\": 12,\"totalTokenCount\": 12,\"promptTokensDetails\": [{\"modality\": \"TEXT\",\"tokenCount\": 12}]},\"modelVersion\": \"gemini-2.0-flash\",\"responseId\": \"1gvIaKSQOt3h1PIPwJO12Ac\"}\r\n\r\ndata: {\"candidates\": [{\"content\": {\"parts\": [{\"text\": \"\\n\"}],\"role\": \"model\"},\"finishReason\": \"STOP\"}],\"usageMetadata\": {\"promptTokenCount\": 11,\"candidatesTokenCount\": 2,\"totalTokenCount\": 13,\"promptTokensDetails\": [{\"modality\": \"TEXT\",\"tokenCount\": 11}],\"candidatesTokensDetails\": [{\"modality\": \"TEXT\",\"tokenCount\": 2}]},\"modelVersion\": \"gemini-2.0-flash\",\"responseId\": \"1gvIaKSQOt3h1PIPwJO12Ac\"}\r\n\r\n"}
 {"key": "c90feca2", "response": "{\"id\":\"msg_01HpiQTg22STqarE33JnuHdt\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"tool_use\",\"id\":\"toolu_0182nVBg1pTYTadKxS5qgCt4\",\"name\":\"get_current_weather\",\"input\":{\"location\":\"Reims\"}}],\"stop_reason\":\"tool_use\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":427,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":57,\"service_tier\":\"standard\"}}"}
+{"key": "79d28180", "response": "{\n \"id\": \"resp_0d29a574150b40b200692da5070d2c81a19a42ec84f97314ef\",\n \"object\": \"response\",\n \"created_at\": 1764599047,\n \"status\": \"completed\",\n \"background\": false,\n \"billing\": {\n \"payer\": \"developer\"\n },\n \"error\": null,\n \"incomplete_details\": null,\n \"instructions\": null,\n \"max_output_tokens\": null,\n \"max_tool_calls\": null,\n \"model\": \"gpt-4.1-2025-04-14\",\n \"output\": [\n {\n \"id\": \"msg_0d29a574150b40b200692da507957081a19436f848296dfbb9\",\n \"type\": \"message\",\n \"status\": \"completed\",\n \"content\": [\n {\n \"type\": \"output_text\",\n \"annotations\": [],\n \"logprobs\": [],\n \"text\": \"Hey! How can I help you today? 
\\ud83d\\ude0a\"\n }\n ],\n \"role\": \"assistant\"\n }\n ],\n \"parallel_tool_calls\": true,\n \"previous_response_id\": null,\n \"prompt_cache_key\": null,\n \"prompt_cache_retention\": null,\n \"reasoning\": {\n \"effort\": null,\n \"summary\": null\n },\n \"safety_identifier\": null,\n \"service_tier\": \"default\",\n \"store\": true,\n \"temperature\": 1.0,\n \"text\": {\n \"format\": {\n \"type\": \"text\"\n },\n \"verbosity\": \"medium\"\n },\n \"tool_choice\": \"auto\",\n \"tools\": [],\n \"top_logprobs\": 0,\n \"top_p\": 1.0,\n \"truncation\": \"disabled\",\n \"usage\": {\n \"input_tokens\": 9,\n \"input_tokens_details\": {\n \"cached_tokens\": 0\n },\n \"output_tokens\": 11,\n \"output_tokens_details\": {\n \"reasoning_tokens\": 0\n },\n \"total_tokens\": 20\n },\n \"user\": null,\n \"metadata\": {}\n}"} +{"key": "bccdd394", "response": "{\"type\":\"file\",\"id\":\"file_011CVgC7NxcXFGnoqb8FSb6A\",\"size_bytes\":11,\"created_at\":\"2025-12-01T15:19:40.864000Z\",\"filename\":\"ex.txt\",\"mime_type\":\"text/plain\",\"downloadable\":false}"} diff --git a/cachy/_modidx.py b/cachy/_modidx.py index e4be19d..52a0b53 100644 --- a/cachy/_modidx.py +++ b/cachy/_modidx.py @@ -8,6 +8,7 @@ 'syms': { 'cachy.core': { 'cachy.core._apply_async_patch': ('core.html#_apply_async_patch', 'cachy/core.py'), 'cachy.core._apply_sync_patch': ('core.html#_apply_sync_patch', 'cachy/core.py'), 'cachy.core._cache': ('core.html#_cache', 'cachy/core.py'), + 'cachy.core._content': ('core.html#_content', 'cachy/core.py'), 'cachy.core._key': ('core.html#_key', 'cachy/core.py'), 'cachy.core._should_cache': ('core.html#_should_cache', 'cachy/core.py'), 'cachy.core._write_cache': ('core.html#_write_cache', 'cachy/core.py'), diff --git a/cachy/core.py b/cachy/core.py index ceafb86..b96486a 100644 --- a/cachy/core.py +++ b/cachy/core.py @@ -25,10 +25,17 @@ def _cache(key, cfp): def _write_cache(key, content, cfp): with open(cfp, "a") as f: f.write(json.dumps({"key":key, "response": content})+"\n") +# %% ../nbs/00_core.ipynb +def _content(r): + "Extract content from request." + if not hasattr(r, '_content'): r.read() + boundary = httpx._multipart.get_multipart_boundary_from_content_type(r.headers.get("Content-Type", "").encode()) + return r.content.replace(boundary, b"cachy-boundary") if boundary else r.content + # %% ../nbs/00_core.ipynb def _key(r, is_stream=False): "Create a unique, deterministic id from the request `r`." - return hashlib.sha256(f"{r.url.host}{is_stream}".encode() + r.content).hexdigest()[:8] + return hashlib.sha256(f"{r.url.host}{is_stream}".encode() + _content(r)).hexdigest()[:8] # %% ../nbs/00_core.ipynb def _apply_async_patch(cfp, doms): diff --git a/nbs/00_core.ipynb b/nbs/00_core.ipynb index 14917d0..d6e86bb 100644 --- a/nbs/00_core.ipynb +++ b/nbs/00_core.ipynb @@ -75,6 +75,16 @@ "from fastcore.utils import *" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from httpx import RequestNotRead\n", + "from fastcore.test import *" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -391,6 +401,167 @@ " return httpx.Response(status_code=res.status_code, content=content, request=r)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Multipart Requests" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`_key` will throw the following error for multipart requests (e.g. 
file uploads).\n", "\n", "`RequestNotRead: Attempted to access streaming request content, without having called `read()`.`" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "<Request('POST', 'https://api.openai.com/v1/chat/completions')>" ] }, "execution_count": null, "metadata": {}, "output_type": "execute_result" } ], "source": [ "rfu = httpx.Request('POST', 'https://api.openai.com/v1/chat/completions', files={\"file\": (\"test.txt\", b\"hello\")})\n", "rfu" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "test_fail(lambda: _key(rfu), RequestNotRead)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "rfu.read(); _key(rfu);" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Each part of a multipart request is separated by a delimiter called a boundary with this structure `--{RANDOM_ID}`. Here's an example for `rfu`.\n", "\n", "```txt\n", "b'--f9ee33966b45cc8c80952bb57cc728c4\\r\\nContent-Disposition: form-data; name=\"file\"; filename=\"test.txt\"\\r\\nContent-Type: text/plain\\r\\n\\r\\nhello\\r\\n--f9ee33966b45cc8c80952bb57cc728c4--\\r\\n'\n", "```\n", "\n", "As the boundary is a random id, two otherwise identical multipart requests will have different boundaries. And since the boundary is part of the request content, `_key` will generate different keys, leading to cache misses 😞.\n", "\n", "Let's create a helper function `_content` that will extract content from any request and remove the non-deterministic boundary." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#| export\n", "def _content(r):\n", " \"Extract content from request.\"\n", " if not hasattr(r, '_content'): r.read()\n", " boundary = httpx._multipart.get_multipart_boundary_from_content_type(r.headers.get(\"Content-Type\", \"\").encode())\n", " return r.content.replace(boundary, b\"cachy-boundary\") if boundary else r.content" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "<Request('POST', 'https://api.openai.com/v1/chat/completions')>" ] }, "execution_count": null, "metadata": {}, "output_type": "execute_result" } ], "source": [ "rfu = httpx.Request('POST', 'https://api.openai.com/v1/chat/completions', files={\"file\": (\"test.txt\", b\"hello\")})\n", "rfu" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "b'--cachy-boundary\\r\\nContent-Disposition: form-data; name=\"file\"; filename=\"test.txt\"\\r\\nContent-Type: text/plain\\r\\n\\r\\nhello\\r\\n--cachy-boundary--\\r\\n'" ] }, "execution_count": null, "metadata": {}, "output_type": "execute_result" } ], "source": [ "_content(rfu)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def _key(r): return hashlib.sha256(str(r.url.host).encode() + _content(r)).hexdigest()[:8]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's confirm that running `_key` multiple times on the same multipart request now returns the same key."
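, "\n", "As an aside (an illustrative sketch, not an executed cell in this notebook), two multipart requests built separately from the same file should also produce the same key, since only the random boundary differs between them:\n", "\n", "```python\n", "# hypothetical second request, identical to `rfu` apart from its random boundary\n", "rfu2 = httpx.Request('POST', 'https://api.openai.com/v1/chat/completions', files={\"file\": (\"test.txt\", b\"hello\")})\n", "assert _key(rfu) == _key(rfu2)\n", "```"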
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "('927355e6', '927355e6')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "_key(rfu), _key(rfu)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -416,7 +587,7 @@ "#| exports\n", "def _key(r, is_stream=False):\n", " \"Create a unique, deterministic id from the request `r`.\"\n", - " return hashlib.sha256(f\"{r.url.host}{is_stream}\".encode() + r.content).hexdigest()[:8]" + " return hashlib.sha256(f\"{r.url.host}{is_stream}\".encode() + _content(r)).hexdigest()[:8]" ] }, { @@ -1539,7 +1710,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='chatcmpl-b2bea735-71f6-46bb-8645-09b96052d3e2', created=1757941046, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**Streamlined**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" + "ModelResponse(id='chatcmpl-a755135b-491a-461c-9390-34358980cd63', created=1764602550, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**Streamlined**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" ] }, "execution_count": null, @@ -1560,7 +1731,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='chatcmpl-c31df5f6-2278-4f92-9fab-5756ec4855e5', created=1757941046, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**Streamlined**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" + "ModelResponse(id='chatcmpl-e24eb764-67d6-424a-90a0-115ccdfbcfcd', created=1764602550, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**Streamlined**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" ] }, "execution_count": null, @@ 
-1745,7 +1916,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='zwvIaMfgK7CBvdIPsqDQqQY', created=1757941047, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Synchronization.\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=3, prompt_tokens=10, total_tokens=13, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" + "ModelResponse(id='zwvIaMfgK7CBvdIPsqDQqQY', created=1764602550, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Synchronization.\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=3, prompt_tokens=10, total_tokens=13, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" ] }, "execution_count": null, @@ -1766,7 +1937,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='zwvIaMfgK7CBvdIPsqDQqQY', created=1757941047, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Synchronization.\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=3, prompt_tokens=10, total_tokens=13, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" + "ModelResponse(id='zwvIaMfgK7CBvdIPsqDQqQY', created=1764602550, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Synchronization.\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=3, prompt_tokens=10, total_tokens=13, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" ] }, "execution_count": null, @@ -1876,7 +2047,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='chatcmpl-66487276-199b-4045-b980-eeb2c83148e9', created=1757941047, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**coroutine**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), 
cache_creation_input_tokens=0, cache_read_input_tokens=0))" + "ModelResponse(id='chatcmpl-e947799b-6bdc-4296-861a-15360428b239', created=1764602550, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**coroutine**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" ] }, "execution_count": null, @@ -1897,7 +2068,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='chatcmpl-a1334d94-646c-4933-8b92-58178b067a88', created=1757941047, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**coroutine**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" + "ModelResponse(id='chatcmpl-f96892b8-bcf4-4fba-9a23-b0e0f0a94b49', created=1764602550, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='**coroutine**', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=8, prompt_tokens=18, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" ] }, "execution_count": null, @@ -2086,7 +2257,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='1gvIaLDAGe2kvdIPsY6--Q0', created=1757941047, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Concurrency\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=2, prompt_tokens=10, total_tokens=12, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" + "ModelResponse(id='1gvIaLDAGe2kvdIPsY6--Q0', created=1764602550, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Concurrency\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=2, prompt_tokens=10, total_tokens=12, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], 
vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" ] }, "execution_count": null, @@ -2107,7 +2278,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='1gvIaLDAGe2kvdIPsY6--Q0', created=1757941047, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Concurrency\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=2, prompt_tokens=10, total_tokens=12, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" + "ModelResponse(id='1gvIaLDAGe2kvdIPsY6--Q0', created=1764602550, model='gemini-2.0-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Concurrency\\n', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=2, prompt_tokens=10, total_tokens=12, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=10, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])" ] }, "execution_count": null, @@ -2212,7 +2383,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='chatcmpl-78f754da-9ec9-490e-86f5-07049d0446e5', created=1757941047, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=0, function=Function(arguments='{\"location\": \"Reims\"}', name='get_current_weather'), id='toolu_0182nVBg1pTYTadKxS5qgCt4', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=57, prompt_tokens=427, total_tokens=484, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" + "ModelResponse(id='chatcmpl-07d11f00-b1e3-4f6c-884e-2175b26bac22', created=1764602550, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=0, function=Function(arguments='{\"location\": \"Reims\"}', name='get_current_weather'), id='toolu_0182nVBg1pTYTadKxS5qgCt4', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=57, prompt_tokens=427, total_tokens=484, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" ] }, "execution_count": null, @@ -2233,7 +2404,7 @@ { "data": { "text/plain": [ - "ModelResponse(id='chatcmpl-73aac225-ac1b-42cc-80cf-61cefb999295', created=1757941047, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, 
choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=0, function=Function(arguments='{\"location\": \"Reims\"}', name='get_current_weather'), id='toolu_0182nVBg1pTYTadKxS5qgCt4', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=57, prompt_tokens=427, total_tokens=484, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" + "ModelResponse(id='chatcmpl-2614ea50-25bb-47b4-9e4e-706ab6374c95', created=1764602550, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=0, function=Function(arguments='{\"location\": \"Reims\"}', name='get_current_weather'), id='toolu_0182nVBg1pTYTadKxS5qgCt4', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=57, prompt_tokens=427, total_tokens=484, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))" ] }, "execution_count": null, @@ -2246,6 +2417,57 @@ "r" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Multipart Request" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "FileMetadata(id='file_011CVgC7NxcXFGnoqb8FSb6A', created_at=datetime.datetime(2025, 12, 1, 15, 19, 40, 864000, tzinfo=datetime.timezone.utc), filename='ex.txt', mime_type='text/plain', size_bytes=11, type='file', downloadable=False)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cli = Anthropic()\n", + "r = cli.beta.files.upload(file=(\"ex.txt\", b\"hello world\", \"text/plain\"))\n", + "r" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "FileMetadata(id='file_011CVgC7NxcXFGnoqb8FSb6A', created_at=datetime.datetime(2025, 12, 1, 15, 19, 40, 864000, tzinfo=datetime.timezone.utc), filename='ex.txt', mime_type='text/plain', size_bytes=11, type='file', downloadable=False)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cli = Anthropic()\n", + "r = cli.beta.files.upload(file=(\"ex.txt\", b\"hello world\", \"text/plain\"))\n", + "r" + ] + }, { "cell_type": "markdown", "metadata": {},