diff --git a/cachy.jsonl b/cachy.jsonl
index ca47b48..1b82468 100644
--- a/cachy.jsonl
+++ b/cachy.jsonl
@@ -176,3 +176,51 @@
{"key": "86a7d5c4", "response": "{\"id\":\"msg_01JK27RVSVXLGDfTvgHFKUk2\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"\\n\\nI was only able to complete the first step of your calculation using the tool:\\n- 1 + 2 = 3\\n\\nTo complete your full request, I would still need to:\\n1. Add 2 to the result of 3 (which would give us 5)\\n2. Then add 3 to that result (which would give us the final answer of 8)\\n\\nThe complete calculation would be: 1 + 2 + 2 + 3 = 8\\n\\nWould you like me to continue with the remaining calculations using the tools?\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":890,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":130,\"service_tier\":\"standard\"}}"}
{"key": "a8b2d330", "response": "{\"id\":\"msg_01HSzeFmteF2FVLvWfZiQexf\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"\\n\\nThe calculation is complete! Using the async_add tool, I calculated that 5 + 7 = 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":568,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":30,\"service_tier\":\"standard\"}}"}
{"key": "84d754f2", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01R3fz72tx2mQQcQfci52oXX\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":568,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":2,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nThe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" calculation is complete! Using the async_add tool, I calculated that\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 5 + 7 = 12.\"}}\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":568,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"output_tokens\":30} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "3f3b5a24", "response": "{\"id\":\"msg_01FWtve2FvfjT3opKYBD86i2\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"Hello! It's nice to meet you. How are you doing today? Is there anything I can help you with or would you like to chat about?\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":10,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":34,\"service_tier\":\"standard\"}}"}
+{"key": "6f88a886", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_019h6MTdtZRAHz3WpWYKd2rE\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":2598,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":2598,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":3,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking at your\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `details` property\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", the issue is with this\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" line:\\n\\n```python\\nreturn f\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'\\\\n\\\\n{\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ps.join('\\\\n')}\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\\n '\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nYou're calling `join`\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" on the list `ps`, but `join\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"` is a string method,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" not a list method. You need to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" call it on the separator\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" string instead:\\n\\n```python\\nreturn f'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\\n\\\\n{\\\"\\\\\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"n\\\".join(ps)}\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\\n '\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nSo the corrected version\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" should be:\\n\\n```python\\n#\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"| export\\n@patch(as_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"prop=True)\\ndef details(\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"self:litellm.ModelResponse):\\n ps = [f\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"id: `{self.id}`\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\", f\\\"model: `{self\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".model}`\\\",f\\\"finish_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason: `{self.f_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason}`\\\"]\\n return f'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\\n\\\\n{\\\"\\\\\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"n\\\".join(ps)}\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\\n '\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nThe `join()\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"` method belongs\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" to strings, where you call `\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"separator.join(iterable)`,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" not `iterable.join(separator)`.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":2598,\"cache_read_input_tokens\":0,\"output_tokens\":257} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "4241edbe", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_014Gx28SQJNZbC2zT3MmtqLA\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":4126,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":4126,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":6,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking at the code, `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"function_call=None` is set explicitly because of how L\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"iteLLM's `Message` class is designe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d to handle both the newer \\\"tools\\\" API an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d the older \\\"functions\\\" API.\\n\\nHere's why it's necessary:\\n\\n1. 
**Legacy\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" compatibility**: The `function_call` parameter is from Open\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"AI's older function calling API, which has been superseded by the newer \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tools\\\" API that uses `tool_calls`.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n2. **Default behavior**: The `Message` class likely\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" has some internal logic that could potentially set\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `function_call` to a default value or\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" try to infer it from other parameters if\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" not explicitly set.\\n\\n3. 
**Avoiding\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" conflicts**: By explicitly setting `function_call=None`, the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" code ensures there's no ambiguity or\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" conflict between the old `function_call` approach and the new `tool_calls`\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" approach being used.\\n\\n4. **API consistency**: Many LLM providers now\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" expect either `function_call` OR `tool_calls`,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" but not both. 
Setting `function_call=None` makes\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" it clear that only the `tool_calls` mechanism should be used.\\n\\nThis\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" is a defensive programming practice to ensure the message is structured exactly as intende\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d for the modern tools API, without any interference from the legacy\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" function calling mechanism.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":4126,\"cache_read_input_tokens\":0,\"output_tokens\":282} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "4cf31aee", "response": "{\"id\":\"msg_01WPnhAzEJ2kNFSgsCrvXbfy\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":17,\"service_tier\":\"standard\"}}"}
+{"key": "a9ac3fe4", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01BZmofSEqnDG6ktK2h1yLYj\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3811,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":3811,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at your `mk_msg` function, you\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re trying to conditionally add the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `tool_calls` and `function\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_call` keys\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\". The current syntax\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" has issues with the dict\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" merging. 
Here's a cle\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"aner approach:\\n\\n```python\\ndef\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" mk_msg(content, role='\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"user', tcs=None):\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = {'content': content, 'role':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 'assistant' if t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs else role}\\n if tcs:\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" msg.update({'tool_calls': tcs,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 'function_call': None})\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n return msg\\n```\\n\\nOr\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" if you prefer a more functional\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" one-liner approach:\\n\\n```python\\ndef mk_\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg(content, role='user',\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" tcs=None):\\n return\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" {'content': content, 'role\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"': 'assistant' if tcs else role\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"} | (\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n {'tool_calls': tcs\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", 'function_call': None\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"} if tcs else {}\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\")\\n```\\n\\nThe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `|` operator (\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Python 3.9+) mer\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ges dictionaries clean\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ly. 
If you need Python\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 3.8 compatibility, use\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `{\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"**dict\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"1, **dict2}` instea\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3811,\"cache_read_input_tokens\":0,\"output_tokens\":257} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "d5bc33fc", "response": "{\"id\":\"msg_01WZfPkqTmcs1HWbTBUmXXWV\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":17,\"service_tier\":\"standard\"}}"}
+{"key": "035198f6", "response": "{\"id\":\"msg_01YHFPwsVSn127Jt1fkXUZh9\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The calculation 5 + 7 equals 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":16,\"service_tier\":\"standard\"}}"}
+{"key": "521b0c5d", "response": "{\"id\":\"msg_01QpyVAYnfKoDF7DxxVbg4a7\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is **12**.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":17,\"service_tier\":\"standard\"}}"}
+{"key": "362d3571", "response": ""}
+{"key": "362d3571", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01VoyfTQKP3H6GN3CdLCLghW\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":5687,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":5687,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"You\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re right - you've created a bit\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" of a mismatch. 
You\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re creating plain dictionaries for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" messages, but `ModelResponse` expects structure\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d objects with specific attributes.\\n\\nLooking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at your code, you have a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" few options to fix this:\\n\\n##\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Option 1: Create a proper\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ModelResponse for tool calls\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n```python\\n# Create a mock\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ModelResponse with tool calls\\ndef\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" mk_mock_response(content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", tool_calls=None):\\n from\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 
litellm.types.utils import\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Message, Choices\\n \\n msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = Message(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n content=content,\\n role\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"='assistant',\\n tool_calls\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=tool_calls,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n function_call=None\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n )\\n \\n choice\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = Choices(\\n finish_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason='tool_calls'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" if tool_calls else 'stop\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"',\\n index=0,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n message=msg\\n )\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n \\n return ModelResponse(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n choices=[choice],\\n model\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=model,\\n usage=None\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" # or mock usage\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n )\\n```\\n\\n## Option \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"2: Fix the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `show_tools` patch\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nYour current patch\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" has a bug - `message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"` is undefined. 
It\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" should reference `self.msg`:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n```python\\n@patch(as\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_prop=True)\\ndef show\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_tools(self: litellm\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".ModelResponse):\\n tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_calls = self.msg.tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_calls if self.msg else\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" None\\n if not tool_calls:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" return ''\\n return \\\"\\\\n\\\".join([f\\\"\\\\n\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\ud83d\udd27 {tc.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"function.name}({tc.function.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"arguments})\\\\n\\\" for tc in tool_calls\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"])\\n```\\n\\n## Option 3\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\": Keep using\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" dicts but ad\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d a helper\\n\\nSince you're already using \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dicts successfully for the completion\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" calls, you could create a helper to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" display tool calls from dict messages:\\n\\n```python\\ndef show_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dict_tools(msg_dict):\\n tool\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_calls = msg_dict.get\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"('tool_calls', [])\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n if not tool_calls: return ''\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n return \\\"\\\\n\\\".join([f\\\"\\\\n\\\\n\ud83d\udd27 \"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"{tc['function']['name'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"]}({tc['function']['arguments'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"]})\\\\n\\\" for tc in tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_calls])\\n```\\n\\nWhich approach would you prefer? The dict\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" approach has been working well for your\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" completion calls, so you\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" might want to stick with that pattern\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":5687,\"cache_read_input_tokens\":0,\"output_tokens\":523} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "87baaba1", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01Nx33pRPCqpRcXhS4UhpcsA\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":5875,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":5875,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"You\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re right that you've created a bit\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" of a mismatch - you're creating\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" plain dictionaries for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" messages but then trying to work\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" with `ModelResponse` objects that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" have specific structure and methods.\\n\\nHere\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" are a few 
approaches to resolve this:\\n\\n## Option 1:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Create a mock ModelResponse for testing\\n\\n```python\\ndef\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" mk_mock_response(content, tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_calls=None, model=\\\"test-model\\\"):\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n \\\"\\\"\\\"Create a mock ModelResponse for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" testing purposes\\\"\\\"\\\"\\n from litellm.types\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".utils import Message, Choices\\n \\n msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = Message(\\n content=content,\\n role\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"='assistant',\\n tool_calls=\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tool_calls,\\n function_call=None\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n )\\n \\n choice = Choices(\\n finish_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason='stop',\\n index=0,\\n message=msg\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n )\\n \\n response = lit\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ellm.ModelResponse(\\n id='chat\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cmpl-xxx',\\n created=1000000000,\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"model=model,\\n object='chat.completion',\\n choices\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=[choice]\\n )\\n \\n return response\\n```\\n\\n## Option 2\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\": Convert your dict messages to proper Message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" objects\\n\\n```python\\ndef dict_to_message(msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_dict):\\n \\\"\\\"\\\"Convert a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" message dict to a proper Message object\\\"\\\"\\\"\\n return\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Message(\\n content=msg_dict['content'],\\n role=\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg_dict['role'],\\n tool_calls=msg_dict.\"}}\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"get('tool_calls'),\\n function_call=msg_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dict.get('function_call')\\n )\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n```\\n\\n## Option 3: Ad\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d methods to work with dict messages directly\\n\\n```python\\ndef show\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_dict_tools(msg_dict):\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n \\\"\\\"\\\"Show tools for a dict-\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"based message\\\"\\\"\\\"\\n tool_calls = msg_dict.get\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"('tool_calls', [])\\n if\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" not tool_calls: return ''\\n return \\\"\\\\n\\\".join([f\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\\\\n\\\\n\ud83d\udd27 {tc['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"function']['name']}({tc['function']['arguments']})\\\\n\\\"\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" for tc in tool_calls])\\n```\\n\\nWhich\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" approach would you prefer? The mock response\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" approach might be cleanest for testing your\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" display logic without having to actually call the L\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"LM.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0}\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":5875,\"cache_read_input_tokens\":0,\"output_tokens\":502} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "0bd5c39f", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01NZAHhYQiHWo1YaoTHBewR7\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":2742,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":2742,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at the pattern established in the code, I'll create\"}}\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a `_details` function that follows\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the same structure as `_msg` and `_f_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason`:\\n\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"python\\n#| export\\ndef _details(resp\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"):\\n ps = [f\\\"id: `{\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"resp['id']}`\\\", f\\\"model: `{\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"resp['model']}`\\\",f\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"finish_reason: `{_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"f_reason(resp)}`\\\"]\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n if hasattr(resp,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 'usage') and resp['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"usage']: ps.append(f\\\"usage\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\": `{resp['usage']}`\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\")\\n return f'\\\\n\\\\n{'\\\\\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"n- '.join(ps)}\\\\\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"n\\\\n '\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n@patch(as_prop=True)\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\ndef 
details(self:litell\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"m.ModelResponse): return _details(self)\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nThis follows the establishe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d pattern where:\\n- The\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" underscore function (`\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_details`) takes\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the raw response object and extra\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cts the needed information\\n- The patch\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" applies it as a property to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the `ModelResponse` class\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n- It re\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"uses the existing `_f\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_reason` function to get the finish reason consistently\"} 
}\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":2742,\"cache_read_input_tokens\":0,\"output_tokens\":255} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "5039fc46", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01NbYcCg3dmHQGP8VaNFKc88\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3000,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":3000,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at the pattern established in the code, I'll create a\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `_content` function that follows the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" same structure as `_msg` an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d `_f_reason`:\\n\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"python\\n#| export\\ndef _content(resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"): \\n msgc = _\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg(resp).content\\n return msgc[0\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"]['text'] if isinstance(msgc\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",list) else msgc\\n\\n@\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"patch(as_prop=True)\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\ndef content(self:litell\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"m.ModelResponse): return _content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(self)\\n```\\n\\nThis follows\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the established pattern where:\\n1\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\". The underscore function\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" (`_content`) takes\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a response object and extra\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cts the needed data\\n2. 
The property\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" patch uses the underscore function to provide a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" clean interface\\n3. It handles both list and string\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" content formats, just like the existing `content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"` property does\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3000,\"cache_read_input_tokens\":0,\"output_tokens\":182} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "9fffe54a", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01BkKorLM6x4NFxeGfbSUJ7g\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3029,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":3029,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at your code, I see a clear\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" pattern in how you're creating properties\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" for `ModelResponse`.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" You're right that this can\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" be factored out! 
Here\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s a cleaner approach:\\n\\n```python\\n#\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"| export\\ndef add_resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_props(cls, **props):\\n \\\"\\\"\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Add properties to a response class from a dict\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" of name->function pairs\\\"\\\"\\\"\\n for name, func in props\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".items():\\n setattr(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cls, name, property(func))\\n\\n# Define\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" all the property functions\\ndef _msg(resp):\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" return resp['choices'][0]['message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"']\\ndef _f_reason(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"resp): return resp['choices'][0\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"]['finish_reason']\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" \\ndef _content(resp): \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n msgc = _msg(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"resp).content\\n return msgc\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"[0]['text'] if isinstance(msgc,list) else msgc\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\ndef _details(resp):\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n ps = [f\\\"id:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `{resp['id']}`\\\",\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" f\\\"model: `{resp['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"model']}`\\\",f\\\"finish_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason: `{_f_reason\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(resp)}`\\\"]\\n if\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 
hasattr(resp, 'usage\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"') and resp['usage']: ps\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".append(f\\\"usage: `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"{resp['usage']}`\\\")\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n return f'\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\\n{'\\\\n- '.join\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(ps)}\\\\n\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" '\\n\\n# Add all properties\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at once\\nadd_resp_props(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"litellm.ModelResponse, \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n msg=_msg,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" \\n f_reason=_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"f_reason, \\n content\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=_content, \\n details\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=_details)\\n```\\n\\nThis\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" eliminates the repet\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"itive `@patch(as_prop=True)` pattern\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" and makes it easy\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" to see all the properties being added in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" one place. 
You could even make it more\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" concise with a decorator if you prefer:\\n\\n```python\\n#\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"| export\\ndef resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_props(**props):\\n \\\"\\\"\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Decorator to add multiple properties to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ModelResponse\\\"\\\"\\\"\\n def decorator(cls\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"):\\n add_resp_props(cls, **\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"props)\\n return cls\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"return decorator\\n\\n@resp_props(msg=_msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", f_reason=_f_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason, content=_content, details\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=_details)\\nclass _\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\": pass # Just to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" trigger the decorator on Model\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Response\\n```\\n\\nThe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" first approach is probably\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" cleaner and more explicit though\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3029,\"cache_read_input_tokens\":0,\"output_tokens\":526} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "451e22c2", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_0147Z8nWZ6Ur8PTRS7WTukx1\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3038,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":3038,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":3,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking at the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" pattern, you're repeating the same structure\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" for each property\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\":\\n\\n1\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\". Define a standalone function that takes the response object\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n2. 
Create\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a patch that adds\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" it as a property\\n\\nHere's a clever\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" way to factor this out using\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a decorator:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n```python\\n#\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"| export\\ndef resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_prop(func):\\n \\\"\\\"\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Decorator to automatically create\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a property patch from a function\\\"\\\"\\\"\\n prop\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_name = func.__name__.l\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"strip('_') \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"# Remove leading underscore if\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" present\\n \\n @patch\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(as_prop=True)\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n def prop_func(self: litellm.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ModelResponse):\\n return func(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"self)\\n \\n # Set the correct\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" name for the patch\\n prop_func.__name__\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = prop_name\\n return func\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" # Return original function so\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" it's still available\\n\\n# Now\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you can define your properties\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" much more concisely:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n@resp_prop\\ndef _\"}}\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg(resp): \\n return resp['choices'][0\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"]['message']\\n\\n@resp_prop\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" \\ndef _f_reason(resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"): \\n return resp['choices\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'][0]['finish_reason']\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n@resp_prop\\ndef _content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(resp): \\n msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"c = _msg(resp).content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n return msgc[0]['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"text'] if isinstance(msgc,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"list) else msgc\\n\\n@resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_prop\\ndef _details(resp\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"):\\n ps = [f\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"id: `{resp['id']\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"}`\\\", f\\\"model: `{\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"resp['model']}`\\\",f\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"finish_reason: `{_f\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_reason(resp)}`\\\"]\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n if hasattr(resp, \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'usage') and resp['usage'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"]: ps.append(f\\\"usage\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\": `{resp['usage']}`\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\")\\n return f'\\\\n\\\\n{'\\\\n-\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" '.join(ps)}\\\\n\\\\\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"n '\\n```\\n\\nThis 
elimin\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ates all the repetitive\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `@patch(as_prop=True\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\")` boilerplate while\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" keeping the same functionality.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" The decorator handles creating\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the property patch automatically,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" and you still\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" have access to the original functions\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" if needed.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":3038,\"cache_read_input_tokens\":0,\"output_tokens\":466} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "04aae464", "response": "{\"id\":\"msg_01JCVb91mFdaGrB1gqxeceBR\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The answer is **12**. Using the calculation tool, I confirmed that 5 + 7 = 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":29,\"service_tier\":\"standard\"}}"}
+{"key": "2597d1b9", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01FkUV4NpJmoCGvM1ukpCadB\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":5203,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":5203,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at the code\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", I can see you\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re working with LiteLLM's\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ModelResponse objects an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d have been pat\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ching them to add properties like `msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"`, `content`, and `details`.\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nThe `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"choices` field is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" indeed required for a vali\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d LiteLLM ModelResponse object\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\". When you call\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `completion()`,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" LiteLLM returns a ModelResponse\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" that follows the OpenAI API format, which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" always includes a `choices` array\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\\n\\nIn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" your code, you're accessing\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the response\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" content 
through\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\":\\n- `_msg(resp)` which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" does `resp['choices'][0]['message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"']`\\n- `_content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(resp)` which gets\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the content from the first choice's\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" message\\n- `_f\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_reason(resp)` which gets\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `resp['choices'][0]['finish_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"reason']`\\n\\nSo\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" yes, the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `choices` field is essential\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\". 
Even when there\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s only one response (the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" default), it's still\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" wrapped in a `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"choices` array with one\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" element at index 0.\\n\\nIf\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you're seeing it work without `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"choices`, it might be because:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n1. You're working with a different object\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" type (like a raw\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" dict that hasn't been converte\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d to ModelResponse)\\n2. 
There\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s some internal LiteLLM handling\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" that's adding the choices field automatically\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n3. You're\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" looking at an intermediate\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" state before the response is fully constructe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d\\n\\nTo verify what you\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re working with, you could check:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n```python\\nprint(type(your\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_response))\\nprint(has\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"attr(your_response, '\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"choices'))\\nprint(your_response.keys\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"() if isinstance\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(your_response, dict) else\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" dir(your_response))\\n```\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":5203,\"cache_read_input_tokens\":0,\"output_tokens\":361} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "5b15cb04", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01MZHDT5Mxr66PfwVmaEP2PS\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":6408,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":6408,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":6,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"You're absolutely right! The\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" issue is that you\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re mixing two different types of message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" structures:\\n\\n1. 
**Request\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" messages** (what\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you send to the API\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\") - these are plain\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" dictionaries\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n2. **Response messages** (what\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you get back from the API) - these are wrappe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d in a `Model\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Response` object with `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"choices[0].\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"message`\\n\\nYour\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"show_tools` function expects\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a `\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ModelResponse` structure\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" (calling `_msg(resp)` which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" looks for `resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"['choices'][0]['message']`\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"), but `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tc\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"q` is just\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a plain dictionary message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" that you\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'re constructing to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" send to the API.\\n\\nHere's what\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" I'd suggest:\\n\\n```python\\n#\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Keep mk_msg for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" creating request messages (\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"what you send to API\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\")\\ndef mk_msg(content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", role='user', tcs=\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"None):\\n msg = {'content\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"': content, 'role': \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'assistant' if tcs else role\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"}\\n if tcs: msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".update({'tool_calls': t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs, 'function_call':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" None})\\n return msg\\n\\n#\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Create\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a separate\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" function for showing\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" tools from\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" plain message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" dicts\\ndef _show_tools_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"from_msg(msg_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dict):\\n tool_calls =\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" msg_dict.get('tool_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"calls')\\n if not tool_calls:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" return ''\\n return \\\"\\\\n\\\".\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"join([f\\\"\\\\n\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\ud83d\udd27 {tc['function']['\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"name']}({tc['function']['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"arguments']})\\\\n\\\" for tc\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" in tool_calls])\\n\\n# Keep\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" your existing\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" _\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"show\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_tools for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ModelResponse objects\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\ndef _show_tools(resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"):\\n tool_calls = _\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg(resp).\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"get('tool_calls')\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" if _msg(resp) else None\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n if not tool_calls:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" return ''\\n return \\\"\\\\n\\\".\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"join([f\\\"\\\\n\\\\n\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\ud83d\udd27 {tc['function']['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"name']}({tc['function']['\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"arguments']})\\\\n\\\" for tc\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" in tool_calls])\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n```\\n\\nThis way you\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" have clear\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" separation between:\\n- Functions\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" that work with request message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" dictionaries\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n- Functions that work with `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ModelResponse` objects\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nDoes this approach\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" make sense\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" for your use case?\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":6408,\"cache_read_input_tokens\":0,\"output_tokens\":477} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "1d4c06d5", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01Dv8deoN1eV5qYbyQR1mQQE\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":1040,\"cache_read_input_tokens\":6408,\"cache_creation\":{\"ephemeral_5m_input_tokens\":1040,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Here\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s a `mk_msg_resp\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"` function that creates a dictionary in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the `ModelResponse` structure\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\":\\n\\n```python\\ndef mk_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg_resp(content, role='\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"assistant\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"', tcs=None,\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" model\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"='test\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"-\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"model',\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" finish\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_reason='stop'):\\n \\\"\\\"\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Create\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a mock\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ModelResponse-\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"like dictionary\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" structure\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\\\"\\\"\\n msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = {'content': content, '\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"role': role\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"}\\n if tcs: \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n msg.update({'tool_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"calls': tcs, 'function\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_call': None})\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n return\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" {\\n 'i\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d': 'chat\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cmpl-xxx',\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'create\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d': 1000\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"000000\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\\n 'model': model,\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'choices': [{\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'finish\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_reason': finish_reason,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'index': 0,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'message': msg\\n }\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"],\\n 'usage': {'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"completion\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_tokens': 0\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", 'prompt_tokens': \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"0, 'total_tokens':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 0}\\n }\\n```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nNow\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you can test\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" your\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `_show_tools` function with:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\n```python\\ntc\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"q_resp = mk_msg_resp('\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I will calculate 5 + \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"7 for you', tcs=[\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"mk_tc(simple_add, a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=5, b=7)])\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n_show_tools(tcq_resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\")\\n```\\n\\nThis creates\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a dictionary that matches\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the structure\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" your\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `_show_tools` function expects\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" (with `choices\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"[0].message`\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"), but\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" keeps it\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" as a plain dictionary rather\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" than a `\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ModelResponse` object.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":1040,\"cache_read_input_tokens\":6408,\"output_tokens\":344} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "81e1e091", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_019WYSmPu7jSzL3zsewrHtcP\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":783,\"cache_read_input_tokens\":7448,\"cache_creation\":{\"ephemeral_5m_input_tokens\":783,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"```\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"python\\ndef mk_msg_resp(content, role='assistant', t\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs=None, model='test-\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"model'):\\n \\\"\\\"\\\"Create a mock ModelResponse\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"-like dictionary structure\\\"\\\"\\\"\\n msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" = {'content': content, '\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"role': role}\\n if t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs: 
msg.update({'tool_calls': t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs, 'function_call':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" None})\\n \\n return {\\n 'id':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 'chatcmpl-xxx\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"',\\n 'created': 1\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"000000000,\\n 'model\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"': model,\\n 'choices':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" [{\\n 'finish_reason':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 'tool_calls'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" if tcs else 'stop',\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'index': 0,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'message': msg\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"}]\\n }\\n```\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: 
{\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":783,\"cache_read_input_tokens\":7448,\"output_tokens\":171} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "77f0bdb5", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01LS8uTem5czhmzmJHh1KQBB\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":443,\"cache_read_input_tokens\":8231,\"cache_creation\":{\"ephemeral_5m_input_tokens\":443,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":3,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"You're right\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"! Let's ref\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"actor to use `mk_msg` for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" creating the message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" part:\\n\\n```python\\ndef mk_msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_resp(content, role='assistant\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"', tcs=None, model='\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"test-model'):\\n \\\"\\\"\\\"Create\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a mock ModelResponse-like dictionary structure\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\\\"\\\"\\n msg = mk\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_msg(content, role, t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs)\\n \\n return {\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'id': 'chat\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cmpl-xxx',\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'created': 1000000000\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\\n 'model': model,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'choices': [{\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'finish_reason': 'tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_calls' if tcs else \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'stop',\\n 'index':\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 0,\\n 'message':\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" msg\\n }]\\n }\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n```\\n\\nThis way we\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" reuse the existing\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `mk_msg` function an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d just wrap it in the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" response structure.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":443,\"cache_read_input_tokens\":8231,\"output_tokens\":191} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "c5dc1c27", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01AMfYWVNCEvqRp1TQYkQWSK\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":465,\"cache_read_input_tokens\":8674,\"cache_creation\":{\"ephemeral_5m_input_tokens\":465,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}}}\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Yes\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", that's a good abstraction! Here's how\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" we can break it\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" down:\\n\\n```python\\ndef _\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"wrap_choice(msg, finish\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_reason='stop', index=0):\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\\\"\\\"Wrap a message\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" in a choice structure\\\"\\\"\\\"\\n return {\\n \"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'finish_reason': finish_reason,\\n 'index': index,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'message': msg\\n }\\n\\ndef mk_msg_resp\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"(content, role='assistant', tcs=None, model='test-\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"model'):\\n \\\"\\\"\\\"Create a mock ModelResponse-like dictionary structure\\\"\\\"\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n msg = mk_msg(content\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", role, tcs)\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"choice = _wrap_choice(msg,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" 'tool_calls' if t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"cs else 'stop')\\n \\n return {\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'id': 'chatcm\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"pl-xxx',\\n 'create\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d': 1000000000,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 'model': model,\\n \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'choices': [choice]\\n }\\n```\\n\\nThis makes\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the structure clearer and the `_wrap_choice` function coul\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d be reused if you need to create responses\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" with multiple choices.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":465,\"cache_read_input_tokens\":8674,\"output_tokens\":245} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "b9dbd11d", "response": "{\"type\":\"error\",\"error\":{\"type\":\"invalid_request_error\",\"message\":\"messages.0.content.1: unexpected `tool_use_id` found in `tool_result` blocks: toolu_RuNlbngFMH6QYKg10hE0V1e6a. Each `tool_result` block must have a corresponding `tool_use` block in the previous message.\"},\"request_id\":\"req_011CTXancTPqF52NL9cSZcYG\"}"}
+{"key": "bc4bc5cc", "response": "{\"type\":\"error\",\"error\":{\"type\":\"invalid_request_error\",\"message\":\"messages.0.content.1: unexpected `tool_use_id` found in `tool_result` blocks: toolu_zZkza9wvgjDnFG1VmQR6cRlhk. Each `tool_result` block must have a corresponding `tool_use` block in the previous message.\"},\"request_id\":\"req_011CTXavUYNWszswYwg2djFU\"}"}
+{"key": "fb46c664", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01SuqqfRESN3iWbU7SMmiSdD\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":8581,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":8581,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":5,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking at the error,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the issue is with how you're constructing the message\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" sequence for tool calls. The error message indicates\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\":\\n\\n> \\\"unexpected `tool_use_id` foun\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d in `tool_result` blocks: toolu_zZkza9\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"wvgjDnFG1VmQR6cRl\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"hk. 
Each `tool_result` block must have a corresponding `tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_use` block in the previous message.\\\"\\n\\nThe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" problem is in this part of your code:\\n\\n```python\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\ninpt = [m, tcq,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" *tcr]\\n```\\n\\nYou're passing `tcq\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"` (which is a mock `ModelResponse` dictionary\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\") directly as a message, but the LLM expects actual\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" message dictionaries. 
You need to extract the message from `tcq` first\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\\n\\nHere's the fix:\\n\\n```python\\nm = mk_msg('Please calculate \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"5+7 for me using tools')\\ntcq = mk_msg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_resp('I will calculate 5 + 7 for you',\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" tcs=[mk_tc(simple_add, a=5, b\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"=7)])\\ntcr = mk_tc_results(tcq,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" ['12'])\\n\\n# Extract the actual message from the mock response\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\ntcq_msg = _msg(tcq) \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"# This gets the message dict from the ModelResponse-\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"like structure\\n\\ninpt = [m, tcq_msg, *tcr] \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"# Use tcq_msg instead of tcq\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n```\\n\\nThe issue is that `tcq` is a 
full\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `ModelResponse`-like dictionary with `choices`,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" `id`, etc., but the completion API expects just the message part\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" (which is nested inside `choices[0]['message']`).\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":8581,\"cache_read_input_tokens\":0,\"output_tokens\":378} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "167eb0d4", "response": "{\"id\":\"msg_014fNWb8MpnohfHfpvmf7mkB\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The answer is 12. Using the calculator tool, 5 + 7 = 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":25,\"service_tier\":\"standard\"}}"}
+{"key": "5be660ba", "response": "{\"id\":\"msg_01B3kJNihgnr4rn3YcBm2jM7\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"I'll calculate 5+7 for you.\\n\\n5 + 7 = 12\\n\\nThe answer is 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":18,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":31,\"service_tier\":\"standard\"}}"}
+{"key": "2da4f07a", "response": "{\"id\":\"msg_017fP4EH1dBLaSh1GefmcW9d\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The answer is 12. I used the simple_add tool to calculate 5 + 7 = 12.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":29,\"service_tier\":\"standard\"}}"}
+{"key": "ebd4fe49", "response": "{\"id\":\"msg_018sM4vPsb4bP7kYLnL9PQnQ\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is **12**.\\n\\nWait, let me check that calculation again - the tool returned 13, but that doesn't seem right. Let me recalculate:\\n\\nActually, I apologize for the confusion. The tool correctly calculated that 5 + 7 = 12. The result is **12**.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":79,\"service_tier\":\"standard\"}}"}
+{"key": "38bd4f7e", "response": "{\"id\":\"msg_01Wzi5Ds7XR5HZchQ7XSDWMS\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The answer is 12. (Note: there seems to be an error - the tool returned 13, but 5 + 7 actually equals 12.)\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":114,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":38,\"service_tier\":\"standard\"}}"}
+{"key": "89a0404e", "response": "{\"id\":\"msg_018A6v2yZNxPd7wEiPXPBgX2\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"I'll calculate 5+7 for you using a tool call.\\n\\n\\npython\\nprint(5 + 7)\\n\\n\\nThe result of 5 + 7 is **12**.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":20,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":56,\"service_tier\":\"standard\"}}"}
+{"key": "83547a1d", "response": "{\"id\":\"msg_01W41P4KCCHVXML7Jh5qCN4K\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"I'll use the simple_add function to calculate 5 + 7 for you.\"},{\"type\":\"tool_use\",\"id\":\"toolu_01FRMc3EoWBy78agp931HD1w\",\"name\":\"simple_add\",\"input\":{\"a\":5,\"b\":7}}],\"stop_reason\":\"tool_use\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":430,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":91,\"service_tier\":\"standard\"}}"}
+{"key": "0c035012", "response": "{\"id\":\"msg_012rH9XSK5r9qEqVaGMndSF8\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is 13.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":526,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":17,\"service_tier\":\"standard\"}}"}
+{"key": "be3e9e89", "response": "{\"id\":\"msg_01EokzuUyhrhuPsCSp3RKQDn\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is 13.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":526,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":17,\"service_tier\":\"standard\"}}"}
+{"key": "929676e3", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01J1UHDLmRizq9ayNNz5mqio\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":6289,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":6289,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Looking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" at your code, the issue is likely that when\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you mock tool calls in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the message history, the LLM sees\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" that the tool has already been called and execute\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d, so it doesn't need to make another tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" call - it just responds with the final answer\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\\n\\nIn your example:\\n\\n```\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"python\\nm = mk_msg('Please calculate \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"5+7. Please use tool calls to calculate')\\ntc_resp =\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" mk_msg_resp('I will calculate 5 + 7 \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"for you', tcs=[mk_tc(simple_add, a=\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"5, b=7)])\\ntcr = mk_tc_results(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tc_resp, ['13'])\\ninpt = [m, _\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"msg(tc_resp), *tcr]\\n```\\n\\nThe conversation\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" history shows:\\n1. User asks\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" to calculate 5+7 \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"using tool calls\\n2. 
Assistant\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" says it will calculate and makes a tool call\\n3. Tool\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" returns result \\\"13\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nSo when you call `completion(model, inpt, tools=[\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"lite_mk_func(simple_add)])`, the model\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" sees this complete conversation and simply responds with the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" final answer since the tool call\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" was already executed.\\n\\nTo\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" see tool calls in the response, you'\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d need to:\\n\\n1. 
**\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Ask a new question** that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" requires a tool call:\\n```python\\nin\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"pt.append(mk\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"_msg('Now\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" calculate 10 + 15\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'))\\nres = completion(model, inpt,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" tools=[lite_mk_func(\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"simple_add)])\\n```\\n\\n2. 
**Or\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" start fresh** without the mocked tool execution\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\":\\n```python\\nfresh_msg = mk_msg('Please calculate \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"5+7 using the simple_add tool')\\nres =\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" completion(model, [fresh_msg], tools=[lite_\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"mk_func(simple_add)])\\n```\\n\\nThe m\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ocked tool calls are working correctly - they're\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" just completing the conversation flow rather than trigg\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ering new tool calls.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":6289,\"cache_read_input_tokens\":0,\"output_tokens\":423} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "a8972aa3", "response": "{\"id\":\"msg_0177EQktN2PtEm7d6JRs7xn9\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is 13.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":526,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":17,\"service_tier\":\"standard\"}}"}
+{"key": "2e5a7894", "response": "{\"id\":\"msg_01KfCZupFLfjyKVwovt3zqV8\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The calculation is complete! Using the simple_add function with the parameters a=5 and b=7, the result is **13**.\\n\\nTo explain: I used the available tool to add the two numbers you specified. The function took the first operand (5) and the second operand (7), performed the addition operation, and returned the sum of 13. This is the correct mathematical result for 5 + 7.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":531,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":94,\"service_tier\":\"standard\"}}"}
+{"key": "661a948e", "response": "{\"type\":\"error\",\"error\":{\"type\":\"overloaded_error\",\"message\":\"Overloaded\"},\"request_id\":null}"}
+{"key": "d3e7f5fb", "response": "{\"id\":\"msg_01DS6SAdoLyyeiwf4d2EShTC\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"Hello! How are you doing today? Is there anything I can help you with?\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":8,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":20,\"service_tier\":\"standard\"}}"}
+{"key": "28340e79", "response": "{\"id\":\"msg_01UCDbFX7bJnBDMoV2ChMxig\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"Hello! Nice to meet you! How are you doing today? Is there anything I can help you with?\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":10,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":25,\"service_tier\":\"standard\"}}"}
+{"key": "08597c18", "response": "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01Mgp3LojnRxTgxt5UQ8J55H\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":9110,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":9110,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":1,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"The\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" \\\"overloaded_error\\\" you're seeing is an\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" Anthropic API error that occurs when their\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" servers are experiencing high traffic an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d cannot handle additional requests at the moment.\\n\\nThis is a temporary\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" server-side issue, not a problem\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" with your code. The error means:\\n\\n1. 
**Anthropic's\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" servers are at capacity** - Too many requests are being processed simultaneously\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n2. **Rate limiting in effect\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"** - The API is temporarily rejecting new\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" requests to maintain service quality\\n3. **\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Temporary condition** - This usually resolves within\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" minutes to hours\\n\\n**Solutions:**\\n\\n1. **\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Retry with exponential backoff** - Wait\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" a few seconds and try again, gradually increasing wait\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" time\\n2. 
**Switch models temporarily** - Try one\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" of your other models:\\n ```python\\n # Instea\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d of claude-sonnet-4-20250\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"514, try:\\n model = ms[0] \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"# \\\"gemini/gemini-2.5\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"-flash\\\"\\n # or\\n model = ms[2\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"] # \\\"openai/gpt-4.\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"1\\\"\\n ```\\n\\n3. 
**Ad\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d retry logic**:\\n ```python\\n import\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" time\\n \\n def completion_\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"with_retry(model, messages,\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" max_retries=3, **kwargs):\\n for i\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" in range(max_retries):\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n try:\\n return completion(model, messages, **kwargs)\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n except Exception as e:\\n if \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"overloaded\\\" in str(e) and i \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"< max_retries - 1:\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n time.sleep(2 ** i)\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" # Exponential backoff\\n continue\\n raise e\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n 
```\\n\\nThis is a common occurrence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" during peak usage times for popular AI models. Your\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" code is correct - it's just a temporary server capacity\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" issue.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":4,\"cache_creation_input_tokens\":9110,\"cache_read_input_tokens\":0,\"output_tokens\":398}}\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n"}
+{"key": "ab2ad5e6", "response": "{\"id\":\"msg_01HLx9jZzkM4RTxA5nY3XdWy\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"I'll calculate 5 + 7 using the available tool.\"},{\"type\":\"tool_use\",\"id\":\"toolu_01RvXAdDCU74SM4roy2ZASsZ\",\"name\":\"simple_add\",\"input\":{\"a\":5,\"b\":7}}],\"stop_reason\":\"tool_use\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":436,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":86,\"service_tier\":\"standard\"}}"}
+{"key": "f9aa676c", "response": "{\"id\":\"msg_01CYCbTLbxMiFDy1ycYDxH79\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"I'll calculate 5+7 using the simple_add tool.\"},{\"type\":\"tool_use\",\"id\":\"toolu_01N5VMdBwYigYmkoMKvbiK9K\",\"name\":\"simple_add\",\"input\":{\"a\":5,\"b\":7}}],\"stop_reason\":\"tool_use\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":426,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":86,\"service_tier\":\"standard\"}}"}
+{"key": "de104166", "response": "{\"id\":\"msg_012JU36sX2qyJZuNqiGvN4V5\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The result of 5 + 7 is 13.\\n\\nTo explain: I used the simple_add function with the first operand (a) set to 5 and the second operand (b) set to 7. The function performed the addition operation and returned the sum, which is 13. This is the correct mathematical result for adding these two integers together.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":531,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":83,\"service_tier\":\"standard\"}}"}
+{"key": "6770df93", "response": "{\"id\":\"msg_01XtxApDSd3Pkf3LieZE884A\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The calculation of 5 + 7 equals **12**.\\n\\nWait, let me double-check this result. The function returned 13, which is actually the correct answer for 5 + 7. My initial statement was incorrect - the result is indeed **13**.\\n\\nTo explain: When we add 5 and 7 together, we get 13. This is a basic arithmetic operation where we combine two positive integers to find their sum.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":531,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":99,\"service_tier\":\"standard\"}}"}
+{"key": "d8cec418", "response": "{\"id\":\"msg_01EQzvdQRCJ5XUL81wpTg5b8\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"I'll calculate 5 + 7 for you using the addition function.\"},{\"type\":\"tool_use\",\"id\":\"toolu_01Bu4558oDBhKjcXuheRw57M\",\"name\":\"simple_add\",\"input\":{\"a\":5,\"b\":7}}],\"stop_reason\":\"tool_use\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":425,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":88,\"service_tier\":\"standard\"}}"}
+{"key": "6adcef1d", "response": "{\"id\":\"msg_01B7Ea4Rcw5mduykArFHQvCB\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The calculation of 5 + 7 equals **13**. I used the simple_add function with the parameters a=5 and b=7, and the function returned the result of 13, which is the correct sum of these two numbers.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":531,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":57,\"service_tier\":\"standard\"}}"}
+{"key": "d3bb1f8d", "response": "{\"id\":\"msg_01DjaTr3cTTbeRnTovxjmYYz\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-sonnet-4-20250514\",\"content\":[{\"type\":\"text\",\"text\":\"The calculation is complete! Using the simple_add function, I added 5 + 7 and got the result of **13**.\\n\\n**Explanation of the result:**\\n- The function took the first number (a = 5) and the second number (b = 7)\\n- It performed the addition operation: 5 + 7\\n- The mathematical result is 13, which is the correct sum of these two integers\\n\\nThis is a straightforward arithmetic operation where we're combining two positive integers to get their total value.\"}],\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":531,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":117,\"service_tier\":\"standard\"}}"}
diff --git a/lisette/_modidx.py b/lisette/_modidx.py
index e8eba63..eb921f4 100644
--- a/lisette/_modidx.py
+++ b/lisette/_modidx.py
@@ -13,18 +13,23 @@
'lisette.core.Chat.__init__': ('core.html#chat.__init__', 'lisette/core.py'),
'lisette.core.Chat._call': ('core.html#chat._call', 'lisette/core.py'),
'lisette.core.Chat._prep_msg': ('core.html#chat._prep_msg', 'lisette/core.py'),
- 'lisette.core.Chat.print_hist': ('core.html#chat.print_hist', 'lisette/core.py'),
'lisette.core._add_cache_control': ('core.html#_add_cache_control', 'lisette/core.py'),
'lisette.core._alite_call_func': ('core.html#_alite_call_func', 'lisette/core.py'),
'lisette.core._clean_str': ('core.html#_clean_str', 'lisette/core.py'),
+ 'lisette.core._content': ('core.html#_content', 'lisette/core.py'),
+ 'lisette.core._details': ('core.html#_details', 'lisette/core.py'),
+ 'lisette.core._f_reason': ('core.html#_f_reason', 'lisette/core.py'),
'lisette.core._has_search': ('core.html#_has_search', 'lisette/core.py'),
'lisette.core._is_img': ('core.html#_is_img', 'lisette/core.py'),
'lisette.core._lite_call_func': ('core.html#_lite_call_func', 'lisette/core.py'),
'lisette.core._mk_content': ('core.html#_mk_content', 'lisette/core.py'),
'lisette.core._mk_img': ('core.html#_mk_img', 'lisette/core.py'),
'lisette.core._mk_prefill': ('core.html#_mk_prefill', 'lisette/core.py'),
+ 'lisette.core._msg': ('core.html#_msg', 'lisette/core.py'),
'lisette.core._remove_cache_ckpts': ('core.html#_remove_cache_ckpts', 'lisette/core.py'),
+ 'lisette.core._show_tools': ('core.html#_show_tools', 'lisette/core.py'),
'lisette.core._trunc_str': ('core.html#_trunc_str', 'lisette/core.py'),
+ 'lisette.core._wrap_choice': ('core.html#_wrap_choice', 'lisette/core.py'),
'lisette.core.adisplay_stream': ('core.html#adisplay_stream', 'lisette/core.py'),
'lisette.core.aformat_stream': ('core.html#aformat_stream', 'lisette/core.py'),
'lisette.core.astream_with_complete': ('core.html#astream_with_complete', 'lisette/core.py'),
@@ -33,10 +38,17 @@
'lisette.core.lite_mk_func': ('core.html#lite_mk_func', 'lisette/core.py'),
'lisette.core.litellm.ModelResponse._repr_markdown_': ( 'core.html#litellm.modelresponse._repr_markdown_',
'lisette/core.py'),
+ 'lisette.core.litellm.ModelResponse.content': ('core.html#litellm.modelresponse.content', 'lisette/core.py'),
+ 'lisette.core.litellm.ModelResponse.details': ('core.html#litellm.modelresponse.details', 'lisette/core.py'),
+ 'lisette.core.litellm.ModelResponse.f_reason': ( 'core.html#litellm.modelresponse.f_reason',
+ 'lisette/core.py'),
+ 'lisette.core.litellm.ModelResponse.msg': ('core.html#litellm.modelresponse.msg', 'lisette/core.py'),
+ 'lisette.core.litellm.ModelResponse.show_tools': ( 'core.html#litellm.modelresponse.show_tools',
+ 'lisette/core.py'),
'lisette.core.mk_msg': ('core.html#mk_msg', 'lisette/core.py'),
+ 'lisette.core.mk_msg_resp': ('core.html#mk_msg_resp', 'lisette/core.py'),
'lisette.core.mk_msgs': ('core.html#mk_msgs', 'lisette/core.py'),
'lisette.core.mk_tc': ('core.html#mk_tc', 'lisette/core.py'),
- 'lisette.core.mk_tc_req': ('core.html#mk_tc_req', 'lisette/core.py'),
'lisette.core.mk_tc_result': ('core.html#mk_tc_result', 'lisette/core.py'),
'lisette.core.mk_tc_results': ('core.html#mk_tc_results', 'lisette/core.py'),
'lisette.core.patch_litellm': ('core.html#patch_litellm', 'lisette/core.py'),
diff --git a/lisette/core.py b/lisette/core.py
index 3583273..5db6206 100644
--- a/lisette/core.py
+++ b/lisette/core.py
@@ -3,8 +3,8 @@
# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/00_core.ipynb.
# %% auto 0
-__all__ = ['effort', 'patch_litellm', 'mk_msg', 'mk_msgs', 'stream_with_complete', 'lite_mk_func', 'cite_footnote',
- 'cite_footnotes', 'Chat', 'random_tool_id', 'mk_tc', 'mk_tc_req', 'mk_tc_result', 'mk_tc_results',
+__all__ = ['effort', 'patch_litellm', 'random_tool_id', 'mk_tc', 'mk_msg_resp', 'mk_tc_result', 'mk_tc_results', 'lite_mk_func',
+ 'mk_msg', 'mk_msgs', 'stream_with_complete', 'cite_footnote', 'cite_footnotes', 'Chat',
'astream_with_complete', 'AsyncChat', 'aformat_stream', 'adisplay_stream']
# %% ../nbs/00_core.ipynb
@@ -34,30 +34,78 @@ def __setattr__(self: ModelResponseBase, name, value):
self._orig___setattr__(name, value)
# %% ../nbs/00_core.ipynb
-@patch
-def _repr_markdown_(self: litellm.ModelResponse):
- message = self.choices[0].message
- content = ''
- if mc:=message.content: content += mc[0]['text'] if isinstance(mc,list) else mc
- if message.tool_calls:
- tool_calls = [f"\n\n🔧 {nested_idx(tc,'function','name')}({nested_idx(tc,'function','arguments')})\n" for tc in message.tool_calls]
- content += "\n".join(tool_calls)
- if not content: content = str(message)
- details = [
- f"id: `{self.id}`",
- f"model: `{self.model}`",
- f"finish_reason: `{self.choices[0].finish_reason}`"
- ]
- if hasattr(self, 'usage') and self.usage: details.append(f"usage: `{self.usage}`")
- det_str = '\n- '.join(details)
-
- return f"""{content}
+def _msg(resp): return resp['choices'][0]['message']
+
+@patch(as_prop=True)
+def msg(self:litellm.ModelResponse): return _msg(self)
+
+# %% ../nbs/00_core.ipynb
+def _f_reason(resp): return resp['choices'][0]['finish_reason']
+
+@patch(as_prop=True)
+def f_reason(self:litellm.ModelResponse): return _f_reason(self)
+
+# %% ../nbs/00_core.ipynb
+def _details(resp):
+ ps = [f"id: `{resp['id']}`", f"model: `{resp['model']}`",f"finish_reason: `{_f_reason(resp)}`"]
+ if hasattr(resp, 'usage') and resp['usage']: ps.append(f"usage: `{resp['usage']}`")
+ return f'\n\n{'\n- '.join(ps)}\n\n '
+
+@patch(as_prop=True)
+def details(self:litellm.ModelResponse): return _details(self)
+
+# %% ../nbs/00_core.ipynb
+def _content(resp):
+ msgc = _msg(resp).content
+ return msgc[0]['text'] if isinstance(msgc,list) else msgc
+
+@patch(as_prop=True)
+def content(self:litellm.ModelResponse): return _content(self)
+
+# %% ../nbs/00_core.ipynb
+def random_tool_id():
+ "Generate a random tool ID with 'toolu_' prefix"
+ random_part = ''.join(random.choices(string.ascii_letters + string.digits, k=25))
+ return f'toolu_{random_part}'
+
+# %% ../nbs/00_core.ipynb
+def mk_tc(func, idx=1, **kwargs):
+ args = json.dumps(kwargs)
+ if callable(func): func = func.__name__
+ id = random_tool_id()
+ return {'index': idx, 'function': {'arguments': args, 'name': func}, 'id': id, 'type': 'function'}
+
+# %% ../nbs/00_core.ipynb
+def _wrap_choice(msg, finish_reason='stop', index=0):
+ return {'finish_reason': finish_reason, 'index': index, 'message': msg}
+
+def mk_msg_resp(content, role='assistant', tcs=None, model='test-model'):
+ choice = _wrap_choice(mk_msg(content, role, tcs), 'tool_calls' if tcs else 'stop')
+ return {'id': 'chatcmpl-xxx', 'created': 1000000000, 'model': model, 'choices': [choice]}
+
+# %% ../nbs/00_core.ipynb
+def mk_tc_result(tc, result): return {'tool_call_id': tc['id'], 'role': 'tool', 'name': tc['function']['name'], 'content': result}
-
+# %% ../nbs/00_core.ipynb
+def mk_tc_results(tc_resp, results): return [mk_tc_result(a,b) for a,b in zip(_msg(tc_resp)['tool_calls'], results)]
+
+# %% ../nbs/00_core.ipynb
+def lite_mk_func(f): return f if isinstance(f, dict) else {'type':'function', 'function':get_schema(f, pname='parameters')}
+
+# %% ../nbs/00_core.ipynb
+def _show_tools(resp):
+ tool_calls = _msg(resp)['tool_calls'] if _msg(resp) else None
+ if not tool_calls: return ''
+ return "\n".join([f"\n\n🔧 {tc['function']['name']}({tc['function']['arguments']})\n" for tc in tool_calls])
-- {det_str}
+@patch(as_prop=True)
+def show_tools(self:litellm.ModelResponse): return _show_tools(self)
- """
+# %% ../nbs/00_core.ipynb
+@patch
+def _repr_markdown_(self: litellm.ModelResponse):
+ content = self.content if self.content else str(self.msg) # If no content we will take the complete Message info
+ return f'{content+self.show_tools}\n\n{self.details}'
# %% ../nbs/00_core.ipynb
def _mk_img(data:bytes)->tuple:
@@ -245,37 +293,6 @@ def __call__(self,
elif return_all: return list(result_gen) # toolloop behavior
else: return last(result_gen) # normal chat behavior
-# %% ../nbs/00_core.ipynb
-@patch
-def print_hist(self:Chat):
- "Print each message on a different line"
- for r in self.hist: print(r, end='\n\n')
-
-# %% ../nbs/00_core.ipynb
-def random_tool_id():
- "Generate a random tool ID with 'toolu_' prefix"
- random_part = ''.join(random.choices(string.ascii_letters + string.digits, k=25))
- return f'toolu_{random_part}'
-
-# %% ../nbs/00_core.ipynb
-def mk_tc(func, idx=1, **kwargs):
- args = json.dumps(kwargs)
- if callable(func): func = func.__name__
- id = random_tool_id()
- return {'index': idx, 'function': {'arguments': args, 'name': func}, 'id': id, 'type': 'function'}
-
-# %% ../nbs/00_core.ipynb
-def mk_tc_req(content, tcs):
- msg = Message(content=content, role='assistant', tool_calls=tcs, function_call=None)
- msg.tool_calls = [{**dict(tc), 'function': dict(tc['function'])} for tc in msg.tool_calls]
- return msg
-
-# %% ../nbs/00_core.ipynb
-def mk_tc_result(tc, result): return {'tool_call_id': tc['id'], 'role': 'tool', 'name': tc['function']['name'], 'content': result}
-
-# %% ../nbs/00_core.ipynb
-def mk_tc_results(tcq, results): return [mk_tc_result(a,b) for a,b in zip(tcq.tool_calls, results)]
-
# %% ../nbs/00_core.ipynb
async def _alite_call_func(tc, ns, raise_on_err=True):
try: fargs = json.loads(tc.function.arguments)
diff --git a/nbs/00_core.ipynb b/nbs/00_core.ipynb
index f867be0..0699507 100644
--- a/nbs/00_core.ipynb
+++ b/nbs/00_core.ipynb
@@ -86,324 +86,195 @@
},
{
"cell_type": "markdown",
- "id": "c07a774f",
+ "id": "6c0b9e77",
"metadata": {},
"source": [
- "## Deterministic outputs"
+ "LiteLLM provides an convenient unified interface for most big LLM providers. Because it's so useful to be able to switch LLM providers with just one argument. We want to make it even easier to by adding some more convenience functions and classes. \n",
+ "\n",
+ "This is very similar to our other wrapper libraries for popular AI providers: [claudette](https://claudette.answer.ai/) (Anthropic), [gaspard](https://github.com/AnswerDotAI/gaspard) (Gemini), [cosette](https://answerdotai.github.io/cosette/) (OpenAI)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7ca4aa06",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "ms = [\"gemini/gemini-2.5-flash\", \"claude-sonnet-4-20250514\", \"openai/gpt-4.1\"]\n",
+ "model = ms[1]"
]
},
{
"cell_type": "markdown",
- "id": "a4dfd53d",
+ "id": "470e9926",
"metadata": {},
"source": [
- "LiteLLM `ModelResponse(Stream)` objects have `id` and `created_at` fields that are generated dynamically. Even when we use [`cachy`](https://github.com/answerdotai/cachy) to cache the LLM response these dynamic fields create diffs which makes code review more challenging. The patches below ensure that `id` and `created_at` fields are fixed and won't generate diffs."
+ "## Exploring message schema and completions"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "2c28e8ce",
+ "cell_type": "markdown",
+ "id": "ca69bcbd",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "def patch_litellm():\n",
- " \"Patch litellm.ModelResponseBase such that `id` and `created` are fixed.\"\n",
- " from litellm.types.utils import ModelResponseBase\n",
- " @patch\n",
- " def __init__(self: ModelResponseBase, id=None, created=None, *args, **kwargs): \n",
- " self._orig___init__(id='chatcmpl-xxx', created=1000000000, *args, **kwargs)\n",
+ "In order to communicate with these models we will need to be able to make messages. Then we can call `completion(model, msgs)`\n",
"\n",
- " @patch\n",
- " def __setattr__(self: ModelResponseBase, name, value):\n",
- " if name == 'id': value = 'chatcmpl-xxx'\n",
- " elif name == 'created': value = 1000000000\n",
- " self._orig___setattr__(name, value)"
+ "We can check the [LiteLLM docs](https://docs.litellm.ai/docs/anthropic_unified) to see how to construct a message. \n",
+ "It will look something like `{'role':'user','content':'Hey there!'}`"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "d8e05085",
+ "id": "88157dc9",
"metadata": {},
"outputs": [],
"source": [
- "patch_litellm()"
+ "def mk_msg(content, role='user'): return {'content':content, 'role':role}"
]
},
{
"cell_type": "markdown",
- "id": "fcfbbaf6",
+ "id": "75dff81f",
+ "metadata": {},
+ "source": [
+ "Lets make a message and run it through litellm's completion function!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "01d89c3f",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'content': 'hello there!', 'role': 'user'}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "msg = mk_msg('hello there!')\n",
+ "msg"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "817c9e47",
"metadata": {},
+ "outputs": [],
"source": [
- "## Completion"
+ "cl = completion(model, [msg])"
]
},
{
"cell_type": "markdown",
- "id": "c38c4ea2",
+ "id": "38e5d5b8",
"metadata": {},
"source": [
- "LiteLLM provides an convenient unified interface for most big LLM providers. Because it's so useful to be able to switch LLM providers with just one argument. We want to make it even easier to by adding some more convenience functions and classes. \n",
+ "LiteLLM `ModelResponse(Stream)` objects have `id` and `created_at` fields that are generated dynamically. Even when we use [`cachy`](https://github.com/answerdotai/cachy) to cache the LLM response these dynamic fields create diffs which makes code review more challenging. \n",
"\n",
- "This is very similar to our other wrapper libraries for popular AI providers: [claudette](https://claudette.answer.ai/) (Anthropic), [gaspard](https://github.com/AnswerDotAI/gaspard) (Gemini), [cosette](https://answerdotai.github.io/cosette/) (OpenAI)."
+ "We can set those to something constant."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "d61cf441",
+ "id": "4ae5fdd0",
"metadata": {},
"outputs": [],
"source": [
- "#| export\n",
- "@patch\n",
- "def _repr_markdown_(self: litellm.ModelResponse):\n",
- " message = self.choices[0].message\n",
- " content = ''\n",
- " if mc:=message.content: content += mc[0]['text'] if isinstance(mc,list) else mc\n",
- " if message.tool_calls:\n",
- " tool_calls = [f\"\\n\\n🔧 {nested_idx(tc,'function','name')}({nested_idx(tc,'function','arguments')})\\n\" for tc in message.tool_calls]\n",
- " content += \"\\n\".join(tool_calls)\n",
- " if not content: content = str(message)\n",
- " details = [\n",
- " f\"id: `{self.id}`\",\n",
- " f\"model: `{self.model}`\",\n",
- " f\"finish_reason: `{self.choices[0].finish_reason}`\"\n",
- " ]\n",
- " if hasattr(self, 'usage') and self.usage: details.append(f\"usage: `{self.usage}`\")\n",
- " det_str = '\\n- '.join(details)\n",
- " \n",
- " return f\"\"\"{content}\n",
- "\n",
- "\n",
- "\n",
- "- {det_str}\n",
- "\n",
- " \"\"\""
+ "cl_created, clid = 1000000000, 'chatcmpl-xxx'"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "920d0288",
+ "id": "86b8e635",
"metadata": {},
"outputs": [
{
"data": {
- "text/markdown": [
- "**gemini/gemini-2.5-flash:**"
- ],
- "text/plain": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/markdown": [
- "Hey there yourself! How can I help you today?\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `gemini-2.5-flash`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=113, prompt_tokens=4, total_tokens=117, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=102, rejected_prediction_tokens=None, text_tokens=11), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=4, image_tokens=None))`\n",
- "\n",
- " "
- ],
- "text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='gemini-2.5-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hey there yourself! How can I help you today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=113, prompt_tokens=4, total_tokens=117, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=102, rejected_prediction_tokens=None, text_tokens=11), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=4, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/markdown": [
- "**claude-sonnet-4-20250514:**"
- ],
- "text/plain": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/markdown": [
- "Hello! Nice to meet you! How are you doing today?\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=16, prompt_tokens=10, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
- "\n",
- " "
- ],
- "text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! Nice to meet you! How are you doing today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=16, prompt_tokens=10, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/markdown": [
- "**openai/gpt-4.1:**"
- ],
- "text/plain": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- },
- {
- "data": {
- "text/markdown": [
- "Hello! How can I help you today? 😊\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `gpt-4.1-2025-04-14`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=10, prompt_tokens=10, total_tokens=20, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None))`\n",
- "\n",
- " "
- ],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='gpt-4.1-2025-04-14', object='chat.completion', system_fingerprint='fp_daf5fcc80a', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How can I help you today? 😊', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=10, total_tokens=20, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Hello! It's nice to meet you. How are you doing today? Is there anything I can help you with or would you like to chat about?\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=34, prompt_tokens=10, total_tokens=44, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
+ "execution_count": null,
"metadata": {},
- "output_type": "display_data"
+ "output_type": "execute_result"
}
],
"source": [
- "ms = [\"gemini/gemini-2.5-flash\", \"claude-sonnet-4-20250514\", \"openai/gpt-4.1\"]\n",
- "msg = [{'role':'user','content':'Hey there!', 'cache_control': {'type': 'ephemeral'}}]\n",
- "for m in ms:\n",
- " display(Markdown(f'**{m}:**'))\n",
- " display(completion(m,msg))"
+ "cl.created = 1000000000\n",
+ "cl.id = 'chatcmpl-xxx'\n",
+ "cl"
]
},
{
"cell_type": "markdown",
- "id": "715fff5a",
+ "id": "a6472c75",
"metadata": {},
"source": [
- "## Messages formatting"
+ "### Deterministic ModelResponse params"
]
},
{
"cell_type": "markdown",
- "id": "5671bcb0",
+ "id": "5dca7a23",
"metadata": {},
"source": [
- "Let's start with making it easier to pass messages into litellm's `completion` function (including images)."
+ "The patches below ensure that `id` and `created_at` fields are fixed automatically."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "f46ee6b1",
+ "id": "ed50872a",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def _mk_img(data:bytes)->tuple:\n",
- " \"Convert image bytes to a base64 encoded image\"\n",
- " img = base64.b64encode(data).decode(\"utf-8\")\n",
- " mtype = mimetypes.types_map[\".\"+imghdr.what(None, h=data)]\n",
- " return img, mtype"
+ "def patch_litellm():\n",
+ " \"Patch litellm.ModelResponseBase such that `id` and `created` are fixed.\"\n",
+ " from litellm.types.utils import ModelResponseBase\n",
+ " @patch\n",
+ " def __init__(self: ModelResponseBase, id=None, created=None, *args, **kwargs): \n",
+ " self._orig___init__(id='chatcmpl-xxx', created=1000000000, *args, **kwargs)\n",
+ "\n",
+ " @patch\n",
+ " def __setattr__(self: ModelResponseBase, name, value):\n",
+ " if name == 'id': value = 'chatcmpl-xxx'\n",
+ " elif name == 'created': value = 1000000000\n",
+ " self._orig___setattr__(name, value)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "ecb67a0e",
+ "id": "1f691450",
"metadata": {},
"outputs": [],
"source": [
- "#| export\n",
- "def _is_img(data): \n",
- " return isinstance(data, bytes) and bool(imghdr.what(None, data))\n",
- "\n",
- "def _add_cache_control(msg, # LiteLLM formatted msg\n",
- " cache=False, # Enable Anthropic caching\n",
- " ttl=None): # Cache TTL: '5m' (default) or '1h'\n",
- " \"cache `msg` with default time-to-live (ttl) of 5minutes ('5m'), but can be set to '1h'.\"\n",
- " if not cache: return msg\n",
- " if isinstance(msg[\"content\"], str): \n",
- " msg[\"content\"] = [{\"type\": \"text\", \"text\": msg[\"content\"]}]\n",
- " cache_control = {\"type\": \"ephemeral\"}\n",
- " if ttl is not None: cache_control[\"ttl\"] = ttl\n",
- " if isinstance(msg[\"content\"], list) and msg[\"content\"]:\n",
- " msg[\"content\"][-1][\"cache_control\"] = cache_control\n",
- " return msg\n",
- "\n",
- "def _remove_cache_ckpts(msg):\n",
- " \"remove unnecessary cache checkpoints.\"\n",
- " if isinstance(msg[\"content\"], list) and msg[\"content\"]:\n",
- " msg[\"content\"][-1].pop('cache_control', None)\n",
- " return msg\n",
- "\n",
- "def _mk_content(o):\n",
- " if isinstance(o, str): return {'type':'text','text':o.strip() or '.'}\n",
- " if _is_img(o): \n",
- " img, mtype = _mk_img(o)\n",
- " return {\"type\": \"image_url\", \"image_url\": f\"data:{mtype};base64,{img}\"}\n",
- " return o\n",
- "\n",
- "def mk_msg(content, # Content: str, bytes (image), list of mixed content, or dict w 'role' and 'content' fields\n",
- " role=\"user\", # Message role if content isn't already a dict/Message\n",
- " cache=False, # Enable Anthropic caching\n",
- " ttl=None): # Cache TTL: '5m' (default) or '1h'\n",
- " \"Create a LiteLLM compatible message.\"\n",
- " if isinstance(content, dict) or isinstance(content, Message): return content\n",
- " if isinstance(content, list) and len(content) == 1 and isinstance(content[0], str): c = content[0]\n",
- " elif isinstance(content, list): c = [_mk_content(o) for o in content]\n",
- " else: c = content\n",
- " return _add_cache_control({\"role\": role, \"content\": c}, cache=cache, ttl=ttl)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "da39f96f",
- "metadata": {},
- "source": [
- "Now we can use mk_msg to create different types of messages:"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "85199113",
- "metadata": {},
- "source": [
- "Simple text:"
+ "patch_litellm()"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "f6217f4b",
+ "id": "6c9c5194",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "{'role': 'user', 'content': 'hey'}"
+ "'chatcmpl-xxx'"
]
},
"execution_count": null,
@@ -412,49 +283,79 @@
}
],
"source": [
- "msg = mk_msg(\"hey\")\n",
- "msg"
+ "completion(model, [msg]).id"
]
},
{
"cell_type": "markdown",
- "id": "552e1298",
+ "id": "1279908d",
"metadata": {},
"source": [
- "Lists w just one string element are flattened for conciseness:"
+ "### ModelResponse repr"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "c1a0955a",
+ "cell_type": "markdown",
+ "id": "771820ce",
"metadata": {},
- "outputs": [],
"source": [
- "test_eq(mk_msg(\"hey\"), mk_msg([\"hey\"]))"
+ "We have a lot of extra fields coming from [ModelResponse](https://docs.litellm.ai/docs/completion/output), lets extract out just the info we care about:\n",
+ "- id\n",
+ "- content: Notice how this is stashed away in a list of choices. This is determined by `completion(..., n=?)` where we only every use the None default which returns just one choice.\n",
+ "- model: Claude, gemini, etc\n",
+ "- finish_reason: Could be 'stop' or 'length' in which case there were too many tokens.\n",
+ "- usage: to keep track of costs\n",
+ "\n",
+ "We will keep the content as top level and leave the remaining to a detail dropdown."
]
},
{
"cell_type": "markdown",
- "id": "60a3230e",
+ "id": "3af8a7f2",
"metadata": {},
"source": [
- "With Anthropic caching:"
+ "Note: `ModelResponse` objects can be indexed just like dictionaries, therefore we can make helper functions for our manually made dict data, but then easily patch the `ModelResponse` object with the same logic."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "1d4ef534",
+ "id": "4608f2b1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def _msg(resp): return resp['choices'][0]['message']\n",
+ "\n",
+ "@patch(as_prop=True)\n",
+ "def msg(self:litellm.ModelResponse): return _msg(self)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "57658c5e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def _f_reason(resp): return resp['choices'][0]['finish_reason']\n",
+ "\n",
+ "@patch(as_prop=True)\n",
+ "def f_reason(self:litellm.ModelResponse): return _f_reason(self)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c820e2bd",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "{'role': 'user',\n",
- " 'content': [{'type': 'text',\n",
- " 'text': \"hey I'm Rens. Please repeat it in all caps w a fun greeting\",\n",
- " 'cache_control': {'type': 'ephemeral'}}]}"
+ "(Message(content=\"Hello! It's nice to meet you. How are you doing today? Is there anything I can help you with or would you like to chat about?\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}),\n",
+ " 'stop')"
]
},
"execution_count": null,
@@ -463,121 +364,116 @@
}
],
"source": [
- "msg = mk_msg(\"hey I'm Rens. Please repeat it in all caps w a fun greeting\",cache=True)\n",
- "msg"
+ "cl.msg, cl.f_reason"
]
},
{
- "cell_type": "markdown",
- "id": "001e399e",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "66527928",
"metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Hello! It's nice to meet you. How are you doing today? Is there anything I can help you with or would you like to chat about?\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=34, prompt_tokens=10, total_tokens=44, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))\n"
+ ]
+ }
+ ],
"source": [
- "(LiteLLM ignores these fields when sent to other providers)"
+ "print(cl)"
]
},
{
- "cell_type": "markdown",
- "id": "fcb3164e",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9dde59e2",
"metadata": {},
+ "outputs": [],
"source": [
- "Text and images:"
+ "#| export\n",
+ "def _details(resp):\n",
+ " ps = [f\"id: `{resp['id']}`\", f\"model: `{resp['model']}`\",f\"finish_reason: `{_f_reason(resp)}`\"]\n",
+ " if hasattr(resp, 'usage') and resp['usage']: ps.append(f\"usage: `{resp['usage']}`\")\n",
+ " return f'\\n\\n{'\\n- '.join(ps)}\\n\\n '\n",
+ "\n",
+ "@patch(as_prop=True)\n",
+ "def details(self:litellm.ModelResponse): return _details(self)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "2479f2d8",
+ "id": "9ba7e824",
"metadata": {},
"outputs": [
{
"data": {
- "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gxUSUNDX1BST0ZJTEUAAQEAAAxEVUNDTQJAAABtbnRyUkdCIFhZWiAH0wAEAAQAAAAAAABhY3NwTVNGVAAAAABDQU5PWjAwOQAAAAAAAAAAAAAAAAAA9tYAAQAAAADTLUNBTk8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5yVFJDAAABLAAACAxnVFJDAAABLAAACAxiVFJDAAABLAAACAxyWFlaAAAJOAAAABRnWFlaAAAJTAAAABRiWFlaAAAJYAAAABRjaGFkAAAJdAAAACxjcHJ0AAAJoAAAAEBkbW5kAAAJ4AAAAHxkbWRkAAAKXAAAAJR3dHB0AAAK8AAAABR0ZWNoAAALBAAAAAxkZXNjAAAKXAAAAJR1Y21JAAALEAAAATRjdXJ2AAAAAAAABAAAAAAEAAkADgATABgAHQAiACcALAAxADYAOwBAAEUASgBPAFQAWQBeAGMAaABtAHIAdgB7AIAAhQCKAI8AlACZAJ4AowCoAK0AsgC3ALwAwQDGAMsA0ADVANoA3wDlAOoA8AD1APsBAQEGAQwBEgEYAR4BJAErATEBNwE+AUQBSwFSAVkBXwFmAW0BdQF8AYMBigGSAZkBoQGpAbABuAHAAcgB0AHYAeEB6QHxAfoCAgILAhQCHQImAi8COAJBAkoCUwJdAmYCcAJ6AoMCjQKXAqECrAK2AsACygLVAuAC6gL1AwADCwMWAyEDLAM3A0MDTgNaA2YDcQN9A4kDlQOhA60DugPGA9MD3wPsA/kEBgQTBCAELQQ6BEcEVQRiBHAEfgSMBJoEqAS2BMQE0gThBO8E/gUNBRsFKgU5BUgFWAVnBXYFhgWVBaUFtQXFBdUF5QX1BgUGFgYmBjcGSAZYBmkGegaLBp0Grga/BtEG4wb0BwYHGAcqBzwHTwdhB3MHhgeZB6sHvgfRB+QH+AgLCB4IMghFCFkIbQiBCJUIqQi+CNII5gj7CRAJJAk5CU4JZAl5CY4JpAm5Cc8J5Qn7ChEKJwo9ClMKagqACpcKrgrFCtwK8wsKCyELOQtQC2gLgAuYC7ALyAvgC/kMEQwqDEIMWwx0DI0MpgzADNkM8g0MDSYNQA1aDXQNjg2oDcMN3Q34DhMOLg5JDmQOfw6aDrYO0Q7tDwkPJQ9BD10PeQ+WD7IPzw/sEAkQJhBDEGAQfRCbELkQ1hD0ERIRMBFOEW0RixGqEcgR5xIGEiUSRBJkEoMSoxLCEuITAhMiE0ITYxODE6QTxBPlFAYUJxRIFGkUixSsFM4U8BURFTQVVhV4FZoVvRXfFgIWJRZIFmsWjxayFtUW+RcdF0EXZReJF60X0hf2GBsYQBhlGIoYrxjUGPoZHxlFGWsZkRm3Gd0aAxoqGlAadxqeGsUa7BsTGzsbYhuKG7Eb2RwBHCkcUhx6HKMcyxz0HR0dRh1vHZkdwh3sHhYePx5pHpMevh7oHxMfPR9oH5Mfvh/pIBUgQCBsIJcgwyDvIRshSCF0IaEhzSH6IiciVCKBIq8i3CMKIzcjZSOTI8Ij8CQeJE0kfCSqJNklCCU4JWcllyXGJfYmJiZWJoYmtybnJxgnSSd5J6on3CgNKD4ocCiiKNQpBik4KWopnSnPKgIqNSpoKpsqzisBKzUraSudK9EsBSw5LG0soizXLQstQC11Last4C4WLksugS63Lu0vIy9aL5Avxy/+MDUwbDCjMNoxEjFKMYExuTHxMioyYjKbMtMzDDNFM34ztzPxNCo0ZDSeNNg1EjVMNYc1wTX8Njc2cjatNug3JDdfN5s31zgTOE84jDjIOQU5QTl+Obs5+To2OnM6sTrvOy07azupO+c8JjxlPKQ84z0iPWE9oD3gPiA+YD6gPuA/ID9hP6E/4kAjQGRApUDnQShBakGsQe5CMEJyQrRC90M6Q31DwEQDREZEikT
NRRFFVUWZRd1GIkZmRqtG8Ec1R3pHv0gFSEpIkEjWSRxJYkmpSe9KNkp9SsRLC0tSS5pL4UwpTHFMuU0CTUpNkk3bTiRObU62TwBPSU+TT9xQJlBwULtRBVFQUZpR5VIwUnxSx1MSU15TqlP2VEJUjlTbVSdVdFXBVg5WW1apVvZXRFeSV+BYLlh8WMtZGlloWbdaB1pWWqVa9VtFW5Vb5Vw1XIVc1l0nXXddyV4aXmtevV8OX2BfsmAEYFdgqWD8YU9homH1Ykhim2LvY0Njl2PrZD9klGToZT1lkmXnZjxmkmbnZz1nk2fpaD9olWjsaUNpmWnwakhqn2r3a05rpmv+bFZsr20HbWBtuW4RbmtuxG8db3dv0XArcIVw33E6cZRx73JKcqVzAXNcc7h0E3RvdMx1KHWEdeF2Pnabdvh3VXezeBB4bnjMeSp5iHnnekV6pHsDe2J7wXwhfIF84H1AfaB+AX5hfsJ/I3+Ef+WARoCogQmBa4HNgi+CkYL0g1eDuYQchICE44VGhaqGDoZyhtaHOoefiASIaIjNiTOJmIn+imOKyYsvi5WL/IxijMmNMI2Xjf6OZo7NjzWPnZAFkG2Q1pE/kaeSEJJ5kuOTTJO2lCCUipT0lV6VyZYzlp6XCZd1l+CYTJi3mSOZj5n7mmia1ZtBm66cG5yJnPadZJ3SnkCerp8cn4uf+aBooNehRqG2oiWilaMFo3Wj5aRWpMalN6Wophmmi6b8p26n4KhSqMSpNqmpqhyqjqsCq3Wr6KxcrNCtRK24riyuoa8Vr4qv/7B0sOqxX7HVskuywbM3s660JLSbtRK1ibYBtni28Ldot+C4WLjRuUm5wro7urS7LbunvCG8mr0UvY++Cb6Evv6/eb/0wHDA68FnwePCX8Lbw1fD1MRRxM3FS8XIxkXGw8dBx7/IPci7yTrJuco4yrfLNsu1zDXMtc01zbXONc62zzfPuNA50LrRO9G90j/SwdND08XUSNTL1U7V0dZU1tjXW9ff2GPY59ls2fDaddr623/cBNyK3RDdlt4c3qLfKN+v4DbgveFE4cviU+La42Lj6uRz5PvlhOYN5pbnH+eo6DLovOlG6dDqWurl62/r+uyF7RDtnO4n7rPvP+/L8Fjw5PFx8f7yi/MZ86b0NPTC9VD13vZs9vv3ivgZ+Kj5N/nH+lf65/t3/Af8mP0o/bn+Sv7b/23//1hZWiAAAAAAAABvoAAAOPIAAAOPWFlaIAAAAAAAAGKWAAC3igAAGNpYWVogAAAAAAAAJKAAAA+FAAC2xHNmMzIAAAAAAAEMPwAABdz///MnAAAHkAAA/ZL///ui///9owAAA9wAAMBxdGV4dAAAAABDb3B5cmlnaHQgKGMpIDIwMDMsIENhbm9uIEluYy4gIEFsbCByaWdodHMgcmVzZXJ2ZWQuAAAAAGRlc2MAAAAAAAAAC0Nhbm9uIEluYy4AAAAAAAAAAAoAQwBhAG4AbwBuACAASQBuAGMALgAAC0Nhbm9uIEluYy4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABkZXNjAAAAAAAAABNzUkdCIHYxLjMxIChDYW5vbikAAAAAAAAAABIAcwBSAEcAQgAgAHYAMQAuADMAMQAgACgAQwBhAG4AbwBuACkAABNzUkdCIHYxLjMxIChDYW5vbikAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWFlaIAAAAAAAAPbWAAEAAAAA0y1zaWcgAAAAAENSVCB1Y21JQ1NJRwAAASgBCAAAAQgAAAEAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAVklUIEx
hYm9yYXRvcnkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAENJTkMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzVAABAAAAARbPAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAABQAAAAAAAEAAQAAAAAAAf/bAEMABAMDBAMDBAQDBAUEBAUGCgcGBgYGDQkKCAoPDRAQDw0PDhETGBQREhcSDg8VHBUXGRkbGxsQFB0fHRofGBobGv/bAEMBBAUFBgUGDAcHDBoRDxEaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGv/AABEIAMgBLAMBIgACEQEDEQH/xAAdAAABBAMBAQAAAAAAAAAAAAAGAwQFBwACCAEJ/8QAQhAAAgECBAQEBAMHAwQBAwUAAQIDBBEABRIhBhMxQSJRYXEHFDKBkaGxCBUjQlLB8DPR8RYkYuGCQ3KSCRg0U6L/xAAaAQADAQEBAQAAAAAAAAAAAAACAwQBBQAG/8QAMREAAgICAgEDAgQFBQEBAAAAAQIAEQMhEjEEE0FhIlEjcaHwFDKxwdFCgZHh8TNi/9oADAMBAAIRAxEAPwCtuF+JeC6SDNsygzareKiAu4kZGjuPoHS9yNididr4p74qfENuLpRSUVdVVeWU8xKySnwygjwtpYa1IuQQSRfcYr4SOheKOR9DoRKqGwbe9j54UpDTTVSxVQZKXWCxQXcLft54a3k/gjEihV+BGNlbJ2Y94WySHOc3io55DFG8bP4CNbkfyrfv+PthXiDJlyWqIiqObGygXZLFbW2I97flhtLTxQ1UlTltQwhhkC0+vZyLbHbv/vhLMcyrM2qWFdJLOyR6QWU3Fu59ffEf81EGCCApBG5GtqEpbWtyb++FquqeomvZRpGkW7+uGsWzXI2XfCmuI28LavfB0LuBZjyZ4mjjMSaSo8TEm5NulvTG9OGbTZb6+m+9/bDcVEaR20KzkX3X6fv3xYPw5pEkyvNMyraekrItaxurwGR0UeIk9lU7epsewxipyB+AT9zqexpzapC0fD1dW5LUZrDSLNSU7jVp1K+k7FwehCkgN3FwbW3xP5/8Oc3yKanWn15k8omYpTxF25UZALsPI3Htt3xa+V0wi4cC08XIQXSOmZDrcHcjTa2k6j1seuCLI6PMsypav5RJKCoI5C1OyOoB+lS4IDC43sd+nni/H4+JuCFWtxfXVdg/Yyr+HAB3OWKj5f8Af1PJVUjVUYZJKiLUVdgDYr128vTFhycHZfxrmT1OQUByOnAZOVG/MTmAdLXJB3U9dwGt0xD1fw/zKPibMstnf93yRSgCWpOtmVm+slRudwTt+eHPCua1fAMwir4m+UqQC00NjoFzbUCtydrgXBsduuIkw8GDZAaFxY+zDUY8QcD1mRSV1KZoaxcupYJ5nRSulZugt2NyL/lgZSBYAWmUBgRp0tvqOOmaeXI+IuD8zBq8rrZatGrSJZXhEukaQ8nQnSwC7nw7DyJ5eliZSyKSUDX1Hz8sZmxcFVqrluu6+LEHKoQ/TuT2U5/meUUscVBWTJSrzf4BN1OtQrgqbixsNvQHqL4e5dNV5jmaTu80swTxSu2skkWuSdybWGGL5JVZZl9DXVQpZ4KxLqsc2pkJ/wD7FG6mxv62w6yWjmirI3hvDN9cXgK+1z5nfYYk58iFY2PzhICDRhpQrkWTy/MZvTPVo8RQJKTpJPU36Bhbp64hqyVY6WSSNWSkMn/bM6/y/wBJttcd8TVCK2qhNDUPI1HGplSIMBy5CbA3PVf6u+BvMcylqqgUEhhJefSwF+XfVYEX9D1HW2MxY2V2yve9fH9qP3qWkg4wK/zH/DFRSpWPU11TyVRbRNYAFjtY37e
2E+JqiD971Bgm542u2nTvbfDdYKnJ6QVFVlzrDN/28mtPDuuqyn/7WBv2JxEmllIaUROkQJ2JvYdgT9xvhysQhVj79fb/ANg0eIAkNnEoaQC/fEeV5inG+biWGtKTqUNgwB8j0whFJpRi2DXqLyG2mfLKoDNhKblqLL1xo9QSdugwizFgScMiYi9rm2HWULetX2w0OH+Si9WT5Ljx/lMwfzSXmO5HntthC76yF2uML1A7oNhvhESBH2323FsSGVDubxxCNDdmsbat74cIpN9N1Vh4Tq3OGqTeBA67A36YerOx0WW1rNYCwwlrjkqbGBjpZnKA32J3IwsaZFiR+cFJYlrE9LYSjlkcuzW3/lAtcY30c6RWIOg9rd8K3HCo8hhhsHeUyWsVBWy/c3xJ01DTz6I4njEhYF21MAl+3r74iaepMSOCU5bC7XBJ69vXDzL61OZqq1eQX1SG9t+wH5bYS11HJV7hKI5qWKalzFVEqkLHK0ZugHcE7m9xiKqKKQFV1JVSNtrAYFd/LzxI09ZV1yOtLIsskcYjsNmcX73PbCGYMaeeSB2TmI3idUA3HWx8xiZNHfcobYsdSPePTYNPA8khJBMTMQR239sNGleV3b/ti2ohjyDuQcPkrpgjRpIF+rSzDbzONI5YIkUEAORdrN1Pn6YaDUTVwNajpI6OMxpJ80d2bXZT+ONqfK2qFjpYKaRq47gmULqF+tmta3nhWlekkp+dLJNFKEvDy1DaWB737bYkqziuo4kMUmdlagwOFja+kjb02N7dMdS2F1Pn1VSNmRVPltdSVAM0UUQhY/6pDLcd+/2Iw0gqDUVUrVtS9OtrNMgLFu1j33wmk8gjsJHILWAPYb48XVbxbqDY9rjDADu4LMAKWIiAVEjLCgjDsqrv6279ziRzXhwZM1OKqVC0l9cam5QC1zfoe/4YQlmgdH0agvVreEgennho9VPVyhFMkkrOAutrkb2AwY5E66grXE33JjKuDc3zL5iopctqaqkpjaSVF2H+4tvthPJ86rch1fJTN8s0gdoyLq5B6keeCT/quu4cjqYsonJDSPZHAAjtsGF/Yf4MAkuoqJJWJdiSRbvffCsD5y7FqrVVd/NxjBFUFCb94ccPfErMslrK6qEMVRLVSiUF92TxhiinqEa1jbe3lg7rPjHxDPM9BRUyU8tZDHNVLLHZIJdZJK3uGRoyFIPXa24wC/Dykoq6SZI3nknZQainaj1x2B8LK4N0YHoTbc2sQcWFmvD4zp5uTIlJmb6Qz7KbKCOWVuPxPTttjpN5D4UVPUouaH5x2JXyAm48oKf96CjrWWiindHaDXULHNUxqvi1KSey3BJHQDDHiKiymegdc3knpIle9pA0aCS22twrWNiNjbFd55S8QcDV8/zcaU8uYQPDExUM4Q2uV8iel8GGdZ/nVbQPnlPSw09QtItDmFI6NoqYiAqBomAJlRrm/wD5LpuMFgxLjFHlaggi7Bvd/n/bUY+fkCtfpKoinVJY9UjuobZQbEjv7X/y+H1RBG8cM0c2oPcNEPqS36389seUmTs0zU9Q0lNWxy8hoOUdYbyN7WANr98aZjltZlTQx1sckLSoW8SadgSNj3Fx1GOcyNdyAKQt1qFXDtMmaUMlFT/KwzBlcTsTrBHWxHYA3097YfHK63JIvnoa5JqpY1MgdrvpdR4SQfC1j0/4AZRZjNlxL0cjJ4dDjURrB7G2Cavno80y6TM8slgy6q5apVUp25yjbw72NjbYgbEb4DHiBJrRG7v7e3/H2lSOCN+0IshzakylJRTSyzGVv4EjDSULC7E9b+La3pjQUtPV5s+Y5lO88shBVCFBA+2wP6YHChraGhShkl5hJ17BQo3vYefTfBVlmTpAEaotGFFhrJ29lG59ycT+R5RbGMa6H5To+Nj5m2Fw+yqvpqmIRSwWUWsHZSG/92GJV8sy+oheOSlQKwtp0jcehHtgOpocucCzszdjyyP7nElzkoU8FQ4QdAx/tjh
ZCQbud7GAdVIziv4Z0vETs9O4o6lnDGRU1a7CwB9PbAr/APt/zmojkaHNMtjRbW5sjLcfh1wcx8Scl7NJ4T3viVp+JYyLc4D0tbDcPm58QobEVm8HDlNnRgRkf7NkWYqFzHi+lpJjtpipHlA+5IGLX4b/AGGsjzQxms43rpEIuwgy+Nb+xLH9Dh5w3BNns6LEguzKVcCxBuQT9v7jHUPAXCE2XxQSvIQNjpHbzGOz4ufNlP1CcnyfHw4h9JlGj/8AT+4DNOB/1BxHzdP+pzICCfPTy/yvgEzD9gPNsrM03DvGVDWm3girKF4Sf/mrMB+GO/UgAQAja2GksOkkWuOmOmdipzABc+V3FH7OPxL4WDvWcL1NdTq1jNlzrVr+CHUPuMVfX5bWZRVy0uaUs9DUx/XBPE0br/8AFgDj7H1VExRxHcb3BHbHM/xm4SlztHpOKMoizywLU55JEqi+3LdbMnfv23xJkpBcoxr6hoGcAQFQSty3ffthzy9R1Bh0sCfLB1nXw+gpM05eVVBINx8tPKNaHuNQ+seosR388EdH8PKWoypovCk5Xa/Y9r+vUG3XED50Xdy3H42Q2KlYRUzxDmrd0XdxboMec4EFtAJFgpvi9M44fyah4cai5emELd5F2bbqb+Z6YpzMMuanqW5cfIi7ISCbHp97YSmUZLIjXwnH3GskkXiEaEra4t2J9cPqSoRI0aa2m5GkISbj2+2GkdCqxo7SOFJIZLA7X69cSFLAs/8ABvy+rqxTt064I1UFY+o6iGQPrtFbSWHiO57bG9sO66Coo2C1DKqFNeo7kL+N/wAcR0uTIoD8y0W13WPYgffD9Kaasp1pBoc72kMJBUX6YUR7iMB9o0dYZI+YFN1BGpbAknsD3wyFRBQloqhhI+okkIHtftfDk5TJH4hGXiVtLFQ2kDp+vlhSny7LzHeoZAxP88ljbA8gO5oUmVtdo4boQTv5AAXx5azKskiJyxzBcXGrsNvS2NpomlZIpCqEMQ3Tc3wznlBkeygAenXHeAufL1FZphG76Bfc2GPYYJq0yclGblrdjq7Y3jozUTFNSq2m/W32PriVyGhooczRc/ephoZFdS1LYszdgR5bG+AZgikjuEuMncgw4QaL9fxAwpSymKsQqQhS5Ut2Nuv26++Cmm4AzDMK2gFFyxR5nHLLTVMouQiMQVYDo9rbeuHj/DDNaf5yeR4Y6WOaWCmmlBvUsnWyC5CgXux2FsUMjLjLkamrjcnQgxFFDVCX595VhPiQxKC1+gvfthJ5FWkjUcmQJcKoQ3A9b4ThR1kYzMVK9QO1u/5Y2pq8RLNrQSMwKgWtf++E9dTAb0ZOcMca5pwxpipZWSheXnSQoAu5sCwI6vYGxOy3uBfFpU/xkyCrjqFq4ZaSAwQrGBDeQTHVzGVh/SdHXqPwxSLSRJyxStolt4ypP3vhCoSFHRi5IKAgAeeK8HlZMQpZoYpJvPuIDW54k+VSSRw08oliZAYwjXB1BCdKNcb6bKSLgC9sXyGkp5KesraepikqlR4pKuNA4cX8RVbqG3uO18c000EuZ1KQU6qsjtYK7BV9yT0xanDHGFdkNTl0ObzVZWlVlldKpeZMHYBSjyAqqKEA3HcnBI4IIdytkGx8H+kdhYqSalu5F8PssqpaOMNN/FqmqMzhqYlabWQTrVzvo9t97emBD4xfDhKDKIK7KJKqtahOmRjECml236bkrsSwFgCAd8WdkfEaZsMxgyHMMvnlomQVwVmlpixBOkVGlVLC3VR374kKPP5ctqWphK9PPMGGtJRddQtqXazEe2x7Y6nmv4mLHzyro0oYC+z8e1jfzK1xeqpVTOKqmOrJBaJmjsAjBbi32xKUGW1FQ2p0a/1G9gCf7DB5xvl2W8N8SSQ5fCtLTNEpCJPJL4wSG1Ft9RO59+mBw1bSAgNyl8yT+mPmPI54WOMjqFi8VG2T/tHuXzR5XpEQ507G1k/QYnYKZ6phJVVGhT11NtfyAHXA3l7
8ybl0zc2Ug6nIsFGJmgiiec8yQTOpAYt0Hp/6HXHLce862IUKENsuWmy+IchDJIR1ZbfliJzyYVU2ssFb+rVsPxwtWSGKApAq6EFnYnTqPrbt6YAs0zMIzFIqdbGwYktv6euI0RsjalxZca7krUVjQbGujla9rCPb8Ri0fhrw0+dIkuZVaQU7NZWWHVY+uroD/bFE5dHV5lnVNSs2uTUHcDYRj28/THZvw5kyimoIo3pI1qTHZ0dbiRfT/NsXekq0D3IfWZrI6EsTgLhlcrr4oa9EYq2kOqAAhha+wHcC/li/MvhWKNSg0g21D/y88VvwzNTPDGv8gUKl9yo7b+mLFoZwVAc72tjr+OAi6nHzsXazJiwI364bhfEQbA74wTBiL9xj1X1am7nFFycTyWNVTp2vgG4zRpqGeGljEkzLtftv54NKyQCLTqsxHXA1m0ixxGOO3TxN3/HAnYM1dGcVccZLR5RmMsUgR5w51DQNCHyB7n9PfAquaU6NpCqGHSxub++Lw+LEOWutSDMxqRYOYjpWK/RQe7Hy3vjlnMKxqbNHijYizdH3NvQ9/wAMfNeRgtjRn0vjZ/p3DqWSKZQ0/wDEC7qna/rgD4koFlnec0kxksbMguPwxMNnRESgIgIFrmQg4bGqqPrWQhP6iLr+uEYuWMx2UB5VtRFVQs6xwyAarXYbX9sbfMSEpFLBIzDoYxYdcWRVr83E9mjWYjwSgXF/XzxDCOvpgSaqIaFCguNF/uf0746mN1yDrc5OTG+M96gvFmkVPIoho5KmNTqbWLG52sLYcSZ8ZXjC0eiEbWK2J+/tgkT52UQiJ4GdDqkj0B1vfuexwuGzPW7PErwQOpSOOEXAHWxwwoD7RIYj3goM45mnnQCI72207jzPlhxTZsksZMrwxkGwVKYsAPe+CWalrDS6onjnKklLwA3U9eovcHr6YUpp6qZC1OBpBteYAMT5207DywsqK1GhjdGVBVsnPkkp/FcGwtax6YbxOI5GlYgsG/h7X3v1PoMb0TAuEtq1baffCU0dpXVbBAbCx8sdVRWp89dkmbUupqidy7FgvXuSdv1wtTsTXx8pGciRVCLcsx8u+5OPYKadKaWpjjfS7BY7kX69fx298NkpWjqDCsjRzBwLFSrBr+XUG+PLRJM9sTqulpjR0KVr5PU5Ssr8ySlqoxEUl02LCx/m7na+xth9BkUXGeVmnmqJaGCSMpL8u38TSTdkU6TYMbXI3IFvPFH8GfE3MskzPJaWeZpssjqgtXCzgmdNR3dyL7E3+2LG4m+OVPwzVtQ8Iw2rYp3p6pYtBhmUfRNGwBGrsRYjvjs48qvl9ZshCVRQgd/e5eMqcKlHcY8Pw8M8UVOXQ1lRWRBv9SWmeEkEnazgE2/qtY9sQfKEbNYi8a7m/c/4cO8xzeu4gzE1Obzz1taZSXmllaR2BJIG/YX2AsMLNkGcT0r18WW1RoF1HmiI6fD9R/ztfyOOWw5ueA1ICLOpDyPy9v6upBw8dKRsqVkEprg1lN/BywT+e+GtfSVFHUPFUxtHKrbqw3Hf77EH74cBpBBAoa0hH4Dz9AMAwqeGhNKFGhJqJNS6D4durYdTTU9TE7TNLLUki2tj0/36YcZjRxw0cMkVW05k/rNlYdyDfrfrfzx7kq5XDWqOJzUNSlG8NPIA2q23nthYaxcZxZWo+8fcM8X5xw1A9NR1WnLmqFnkp5Yg8Usi2IunUjwrtextvi3uFvjstVnGd1nE0dPAgoYjQUoVrNPGSWW9iQXBYav5TpPQYoWlkWOpVRKwphKpLutxpva5W+5t2xY+bZfSZhlE5y6OnIhd21ohDBRvc+4PmcV4/I8jFvHsf073XxHYQWB31H/xJ4nyDi/ihsx4WiqFjaCMSmawu1uw/lIuVbtdbjY2wDy6p5REr2W/iK9hjSlPIpQuoFifEb9cJCqSJySeZv0UD9cc3M7ZsrZD7zq4zxQAyVlrly2iZIBpllIVSOo9cPuHHaeeKKN
dfisiD+Zj3OBqXmVb82UaV6IowXcJUzQMTFfmMpC27A9T74lygKnzHYiXf4hRnFpIXjDgQxCzHUPG3e3p+uASWCKhhqMxrCDMgIp0Jvo/8rDofLBZxRIKVWjjMemKyOzG4DW3AHc4rmtq1lPIYFkd10qLi4B3PtfE/ioWHxKfKcJDH4bZDFVVZrauVhKW1AkW38wcdEZXnlNSUxikdUbquobBv6rjp7j74qHg6SCny1UjiUSWuT2H2/5xrmVZVU78+mqVOk/Qx6/hhWTIWymHjxccQnTnA3xPWnzOGizDTypHsJAwOk/7f579I0FWskSPGwYEbEdxj5t5Hms2Yyo8EpQBrOL3KsDtjtH4WcRSvk8cFVIZGRF0k+uK/GzENwaQ+VgBXmsuZKk9jj0VvL5uo9ALYhVrBYWNziOr8yMTugbcnHQL8ZzAtyXqczLuSWNhgP4wzpqegk5J/iEeBL2JOIbPON6PK0cSzKWTqt/wGOcviL8YqivzAw0eoxXtZAT9yRiZvIC6lOPx2bftHnE0tZNK71QGrUxBeVYljHfStyTfzO574oHjl4qPM4Ji4EbHSWVgdJ8/XFhfvda6Fnaop0dhc60ZiDireN4pgrSnRMmsE6Lb/briVSGcS8gohk9SZxUrBGoaKRLeEmMEEehxMCRqmkLJIgNt1tsfQjFdZGzUyry5Q1PJuEc7XHWx7HBoQsmXvpkK3XwNqsVbt6YjyYwr6luPIWx7jLl/LAvSXCMd0PiAOEJ4KmpjAlkVvFcAoLYjcrzKSSlcOdMiNvb3xLJVc5L2VTbquGEFDFCnEio5M3oZppRUuhcgFlXYjV3/ADxvJn2ZRzNor5DqUqA52W/Ww/TD+aoHKZAWNx1ABt+eIOZEEgUFFckEEi+3kb9Dhi5GMmbGo1HT5/mtrU+ZyqYyXAY3uPK9sb5fn9bFTaDPO1mNrSCwHWw2xFyzI1lWy+I7EgEnphVNAQCRPEBvY3wXNqg8FuNOEvh/UZ/V1cM1bHl01Kf4kDqTMD2fSbXTsbG462OJ+X4Wz5bnL5dSxjMvmJeVHU6QI47gG5J2B637+WGHDnxBzk5nQ5ZFmFFQUpmHzNTNTqb+bu1ixPYb+QAxdAzWLIVoIcxmiHMk5fhXQWZuhFze58vXHdfP4qDDjyA8nYDX+L6+Zx/HxK3Jl9vvB/JsnyfhLNmkpqVa8RaFjSQhhGwO9gLg3IvfqPTEVx5ltDLllPm8VDGkyl3eo5gjMrO5MrAWLSMSfqJAVRsD1xMcSfFzhGiqa6mehqq6vSGSO1RSaBFLayrZjsL9dsV9x18UpuLCKOgp3yrKxAnMpgbI7kDbSNrBr2IsbY6OfhjGQBwVPSgdH3JPvBLoo0NiP8q+G9HUiGszVnpVqYdZUOFEZ3O3uCp9wR3wtxT8L4skjXMOE655cxjjab5XTzNMZJOot0RQndjvv6YjOFeOAyUuVZ40jUaqscDqNbBiQo1Mx2QAknY9AMHf/VnCFTV1uQtTZnXPFLKsNXTcuWJwBs5V/D6XtYW8sDjXFlHGhxI73yv3FTLRhfvKKyqr0VktTUIWVlLmQWGliO3nc4tH4VcVT1dVPkyy0NJCpvSQuHM07MT4V6rt3vbY998V5xHw9WZPFS1FcUhWvD1KjUuqxa3iVRZSL7AbHe22IWqrIdMLZaklNyxd21WJPv1xHgyN42bkB/5FK7KKudLQcNcM5rBNV5zlVPKGaNUlLxlRyyYwgYuBYEEdQCbAg7YoHjrJ4sm4praHLaLMaeBSGCV0aq9jc7aPDo8iOuHGTcfZpl2QDLFiiqKeKaQojr4THKhWWMgfUGuG67FRbBDxJwZm1TleV1uZNFl8EsCRQrKhEqgAC1u4O7XJ872OHO6+gqKCa7J739z776jD+OfpG5W7RgiRPCoAFiCDbzw2IsLyPta4BHiOJHNcsOS1BpfmIqomCKW6KRpLLq0kHuL4SoYoknY1sHOTRuWawDefXEp+m5NsEgxGgaSSri5
bCLQwYH+m2+LbyLMos6o5aWk5b1TIUmhBJbSe+w327i/UXxV+YTZcrwjLoSIkjtKGa+p/MHz/AEwbfDzhueugfMFEYJIWlMMzBonUhrG2xDgFWBOsXVhtfDcIyZbVCRf/ACJVjPA0NxbPPhxmeXU09VS6ZqeIcx42dVeNbd1JvgQji0WWMBn87YuH4j8P5zXcPGukK5gYq+omu8aq1LSFysSoQAd7Eld76rgXF8VOkQjYIJAxNr+ajywHmYxgcBRQl+P69zWFSswBOuQ2A74POGEaGCon0ljDYj3tt+ZwNU9PHCTILM/0r/f/AD3wYZVMKChZqkXVAGdT/OQNhjhZ2sTo4E4mRvEFFyqaFJrtOVDG+5F9yTiv5I5JM0OgCyBQPbr/AHwf5xNLKvNqZLtJ4msd/wDgdsQWX5eZD80gBZm1FfIdsF47cFJMHyU9RgBMn4jrsgphFGn1DYhsN2h4hqaGjzGoNMkFfzGpg9UoeTQSGst79RYXG/bE5neWRT0YeVdnFiQb28jgAamkpZgJoj18LgEg+xGLfGXE68iNyHy8mfG3HlqGPC2eT5XnUYq45Iv5ZVbYkdsdefB/iiSpEEUj6WY2AJ3sB0+2OLYy8/y60+uSSKPxMbne97A+mOu/2fOHa2oSCZouQDYcyVTYjyFsS58NZQySnBlLYiHnTQreTBdbl2AAxC5xJOIaqpKtpRCdvbBeuVxx8uMDmOerW2GMzLJvm6Gtp1XRrjKhrd7dcPbGzSEMAZw7xfxW1HLU1lZUGUK1gn3/ANv1xR+d8bz5pO3LCwLfY+ntgi+KtdNTZzV0M45ZhlZGCm6kqxF/xvbFUIVFSTURtJD3CvpPvifxPGDWzS/yvK9MBVh/kmYCUamqpGfyLC34YfcQTCqyqS4R7DoU/wAOKx5hpalHy93I6lSemCCozqQUTx1IKh069be+GZPHZXBBgYvJXJjIIqPsjkhr6SWimYwzKebCx3FxsQe9j/tiehkalpmpai2iVDpcHY/7HAFlrtHNE4O4a4N7ehGC/MJWiy68hPS6nzwvOn1RuB/o/KR1GzQPUAGxuCDcH9MSUMhur7hH8uxwN0c4So8d9DrY2xOZdKYpOXKQ8T/Sw74zIvcLE3tJKoaSJOby46kdyBZgPcYiK3MoZYiQo0rbwkeJfY98Tq5TFVSMFqmpd9WpX02979cMM34KepjU0ua0yyMpJaUaAw+3fHsWMN7xWbIVvUGjVq1wQWsbg9cOFzSO3jFj5DDKs4OzyhXUhpauMk2aCpRr/a4OB+WoqaeRop0MciGzKwsRiz+GDdGQfxTL2JM0bpRzsdKz1VuY9xstug273xpm/Elfmzn56aWVVa6RhrIp8x6jDFlaOMRIAFYgykm246DGLopo5QbSGTuOg38/PDlxry5nZnLDGquKVNTJmtSavOKiWSaQqryOdbMALA372AGPZ0jjlMNJKZItdlkcabgdCfLDM6dKqthGr6iLb4d86CSnPLSzK2q7G+1+nvg2Ju4PeolGzsrLH9VrKF6Li6OAcvoc24btlFLI1dBOEq4rcxpPD9Qby6+Hp274plp2U6Y2LR27ixJ98T9JxlnOX5JFlmXZk9LSJHINMQ0XLNdmJG5Y7bnsAMNxMq8g10QRrvfz7Q8bhDc6FoOHf35TNFNHDHUwobNUwxM8SnYsI5CQNrjVY9O2Ky4c+F2UcRRzxU2dkSRGRGQpGV56E3BKn6WABVgTcXGAfiXjut4mzHLZYL5eaCnEKSRSEMT1Zi3Xc3NvXCGS8TS5PmBqMlaSBeXaXxbSDvf0vig5seNAnEvxHZOz9rjWdWayNSxKb4fZnwfNlvEVTlSfu+nQSRRfN8uZZGGzbXsF269z3wcR5PHxfSUVb8xRZlWVWuRqaStL3I8TA6RsFuL3B3NtthgVPxgoqzhmmoa7LY8wqebrqKeUMqOq/QisDfc7/bFU0PFdbkOc1dfwyf3aZQ6CNm5hRGa+m562sN8D+C2P0nFqQLF
7B70dQxkGNrUy3fjbkdDT5PR1pphBmotA0sNmVmUf6bjbTYbqw28+oxRyxPydbKxRG0NJbw3sbC/n6YIzX5txGJjnc9RUzmRX0iMnchVBt2J8IB77YjHo62i1UOZmakjY89ElU2JFwGt62IvhWZ1ZrQUBQ/4+YrIOR5feROrlm7JYLuRax/8AWCrgfis8OZiJKqgWrpZotPMB0SRqpO4PQi5NwQb2FrHEdwtwhmHGOZtQ5QLSXDPLI2lFuwBJP39cOs6yWoyCuFBm0DrVGNXKF7XU/SdvPc22xgL4wHA/3gpyXYnVnDMUnEf73pKavizBYm0SCjqlSWMW2YEq1gb9R+Ixzrxhk9Nl3E1dT5fFJHHA+6TVKVDX6El0AB9rAjvviQ4Kq6Wgelkyesqcvr2IM7RzEMgB8SjYeEjbfETxVEaHPquSBzIs15dTS62a43LeVzfY9rYF/SHhrixLXE9XdA3/AL1OyjNy5N7zSGoWiBkkQuyi58h5ffC8de9W6i7FWsQL9yf8/DDKMCuURC4hcgyN3XbpiXNDFQx3iN3J2Nr6f/e/5Y4r0O+50EsjXU3r5TJGRsUTdj3Jt09gLY84KeCuplhkYcxR9N7fcHDStqUFPOFuI0j2Pc7bnBL8Nfh9V5/BrykCpIANozZ1PtfGBfwzMZvxBHVdk87ppSO8JH1FgbeuBWbIi9QUgD6na1h1OOpuG/2ceJc6p1+fqIcvgNiecSW/AYJ4v2ZY8uUClrBWVbGxlaOyRg9SB3PvheMZkF1HM+F9MZy38Ovhfm/FfFkWWZKjuSCsz6bpEvck9MfSnhDgaj4aySgy2mhT/tkUFgOpA64YfDn4e5L8OsnWHL6cCoYDnTMLvIfU4NWzSKKLVcA47GNKFt3ONlyA2qdR7S5RChDMBcY9qKWAxyDRsRY4Ypm7vuNxhRczjkVkIF/XFQZSKkvEzhX9rf4U/IStxLlVNqpmOmqEaj+H5G3ljjGSikmBYsybeW2PsD8QuH6HivIazLa1dcc8ZUjuPUHHzJ424CrOEeIK2glQskMh0MF+pL7HEjMMPUqVP4gb9pXdDlt33Jc9yB0GCKtyf5rLZpIxay2G2H9FkcjyJyhpU/zYMhl0ceXmnILeEgkixxBl8i2BEtw+MqqR95TeVIXDJKPoO4wW1qF8p5YYsbALvexHa/cYguQcuzaRGuv8TY4no5UnjaG+iRWJVSbA37fjYj74LMbIImeOKUqYOfLSQkNGbrf6T54e08zwWYn+GTuDuMLzU8ms69Ora6qb79satTGnp7uytqJ0r7G2NLX3NCcbqP8A5/VArta4cxna+2BGpq5JmbWxdb/zbj/1hStzCWVBBFFOkSuSSIzdjhkRb6Emt6xHbFWHFw2ZzPIzeoaHUVQ6bX91Hlh6cuGZBZpFZ2A03AJvbDKCIyG3LkF+gMZwZZPT1UFEFjpZmBYm/Lb/AGwxjXUnRbg7keRSZ5HXVhmgMVCmuSJ25bON9lHfy++IOvkhNY8lLEaaJyf4Y30/jhxTyOG5AvGOrINunS/nhGuax5Kr9LG58zglBDbiCw4hQIxjfRqsNX9OJGaNYHZFUgmw0gdD/h/PHtNlbTSQCVTBH1dwLg9T9sZUxVFTM/OZQNdyQ4YIOwuDbGlgT3ANRqCXflpqSykm/bD2fkk8tTKuw8ZUHfysOuF4J6Xnli3Mcm8khXZVv0H5YZTc6WtlejewUkIwIU2vtjOzBqJTQtGjKSS2m/Swt7YUgjkSByqXD6fw/wCf0wusExXmVepkUb38u3640iP/AHHhbwKuxI/O3ljb1PAyYoqRiIZEGvSx1AGxBAuF+9x+GPaTM5aLKamlkpqC9U/gmliu8dtrA22BF7jCGX5ukiVNPCjxgRG7Bt3Hc9OuFJKnLTGkU1VM3KAPLeMkBvsdziWm5UwhqShsSxuDeM8tioKKLNmppcwp4TTLNzCGeINqUNtvpIFvL0xE8XZtw3VDMp0parNM2nsiymUQ01Iqiyh
E+qQgd2IBJO2ACTkxzLJQGd5S2p5HVUA8gFF8MopnFTKrG3MurX9cXHNlfs6rqv1/OEXteMsD4bcUHhuurIo5IlppirytUPo0kD+Ve7EkDbtgn4r4tyTP+G8wo6OsJzPNKyOWqIhFykf0h5D/ACgAKqLt0v3JqZaCqpJkWSP/AOnrUqQdXljww1CuEglEkhvoiiIJ1ew6nA48ziwrWCK+PzHzDTIVHEiTNBWvk0zmFxPEHBkWwUNYGyg72tfBLxjWUlZQ0k2XyQSwyAC9tMkLbFkPYg32NuxwN5Nk1SKXmVcDrDIEcSEXG5ZQT5XKkWwUfuPn5eUVdW2wtcHGMSqkEbPvKsbkH4gvRiRahrX5fVvxwQwykH5Z42blLzJCdhv1Jv1xBR1L5fO8FRb+GbaCtt+1/PG81TUVkLlEM6FyQq7eHbv5g32xzXQsZ1EcKJtmTER1Ka95mGy+IgE9Pf8Azti8/wBm2FIs1eAvFJUkDl8y4VWB6MQb9O2Kmy/Jyqs00eqZU1bi9m6D774L+CKit4fzaKpp5VgAO1+wHXYYMAAVJmbkxM+j/D1BVxyyy1dVDJSMqCKJFtosN++98EvzlKPAoIANt8VXwDxhDnOSU80chYldwW322wSV1cxppGiJVyNj1GKlIURFFjuPuJMyamV2pWD2Xp2OOT/iR+0ZmNJmVRlGRx6ZYm0SGRTdWHUDzxetNUZnU0Uq5mEvuA6mxI87HHz0+Nctfl3xCzsUjySL8wShTsPbAJ+I24b/AIa6l25V+1JxRlo5dUqVA8z5YJOGP2sqmbNIqfPKcRxOQOYLGzE/pjij941aAGV3WTqwbqDjMvzKqqatEQsXZwAAL3OH+mB1JhmJ1PqrlfGdPnekxyglwCApv1wDcb/D6g4mzlJqyG9jZnXZgCOuGfwmy05Tw3lkeZShqhIVJA9r4swVKCYM4FnFr2viWua00rs4zayhsy+BNBQwyTQPJNEASAm1vwxUfEeWR5OJgqU4iS4IeVlcY6H+LnGi8IZRJMt2kkukIQkAtbv5D3xwRxZm2aZ3nM02bVMkxZtQF7IvoB0woYFJ6hHyHHvH1QP3lmUsnKVIwNgGvf1OEJE5cgLkhFsSD2t/xj2hUwROuo30jbrthCod9Cgm6s3uDgTtqEeul33PaGqMlYWa/UsB6/8AGJfK6dVro5aldcKEN9QGre4HtgfjcQSKY7Eqbn2xJPmaU0bNaQIviKKvXBV9WoBakMM/3hlg12oY21dfEOv44QGYUIksaJGXzLA/3xXTcSKSfHKP/iMJniCI3u8p91BxT6bSLmss+LMcu13+TUALpADH+2H8PEcVMnLKVK9wFk2+22Kh/wCokHR3t6xjC8XFrRrpBDAdNUQOM9Nx0IPNfvH+YZIyxHM6SmlbLlblpU6LK7X39geuG+S8FZ/xI2ZT5Dl5rUoKZ6yskDKBBEDuzaiB3Fh1PQXxJyfEqpbhaLKTEvKhj5LEWXWDftbbtgbkz/MMtppocsqZIKXMI1E6L9Mqq11BHezC/vhGE+UwYMADevfUldcXIcTY9/zjyopOTRxUEX+pI5acld7i2x7dDiEqRGlUaVHvEDpNhYXv+fvhegqqp41ijlkLytdvGdh54apTvzJJBG0rtIUiFv5u5PtcYtReN2YkCzFZI4YEaLVqDsNR67joMNtMAcrpN/U3Axvl1O1VVNFsbAlhfrbG1ZFHDM4RyXvZ1Itb1Hpgxo1c9uSlKyVVYx0gUCRaX17EIOuw7k/nbCVevMWWaJYoYyAEUAiy9gDh9kCL+78wo46KmmlrhHEJqgEGIXJ1ISRbtiCLzUcskKyuCvgIB2I329sJX6nIHtMqLZY6CoXQPEwZRbubHHk1AkSKt2kntqa5AAB3F/W3X3xmV1PJrYnpwUkVvC6WBBOHtRlpiq43NRrLESWINmF9x74I6fuejX5ZCgc6YnBF7MCD67Y8nplp21GRC7Hbfp/lxiUocupSbTF6mAy6pFhFyo3Fhbv
vhtXZa0tTK8dPPBSR3IMiaCQB6nrt3/8AREOLIngNXN6SloGyKqnrqqRq5njSiiA2J1eJmPYAbAe+JvIKCWCUVjRpVJEQrTIbcpidiD1Hv64EeeHQsxXl2ssYbsOgwYcA5zBTVYoqyJOdNIBENBZp3c2AZidKKBudrnFGDGS+2qNSj3LQyKvgmjaN4YZ+W4ZlkAKK4JIYj+axJNul8TUOXioqpHp4AYDa5sb6u5388a0s2TZFWsJJflpJAjI6prurXsQvcEruOu4IwX5TQTV1VJPT2RGPhChgB9juMW5ywwccrAtft9paa9oJ13A1LmlnnpIXbSQHC774iKf4crlk5lQysbWRm8QQemL4gypBEOZBd7bkLhvVZcArFFIHqMcdkhqZTMmQmIu0q9V6g7N6Yi5XOTyQT1i3ia3gA6+QA/wb+eLHzyn5EUjEaQGv6HALnTrMAropfSBta1uoA32PXCqhXLQ4F+J1LRPHbTECLNGHFz7+X546ByTiSDM6JJo5BIjC4W++PnyrPRzSVVOWjlBsFPQn1Hpi3OCPicctpk5sphlRFLsx2cn0xoBmhhOqMxroIlcq13I3A7DHJXxp4DHENfNmlDMtPIoO2/iH+XxZZ48mqqfXylm1j61bt3wL5nn89XcJQh1udmPuL4DnxMbxDCjOUhw/X1GYmmjQyPe2s3CnfrfFzfCj4S1UOcR1/ERjMVO11j6rfzJxKwpWUdass2WQhVO2hemDDK+L4aILzEMGx1bWv/bDWz2KEQuEKbMvbK62mgpFvDdR4SV6rhZ8zijktFJrv0xV1HxjREbVa2ZRffYj1wG8cfGGlyRWgyqRZ66RbJvcLbrfAqxMIyK/aNzyp4mraXL8oilPyJYzyJ5kdCv8wt3HTHOnJmp3+XnVopNyqm+x7fY2sfscWvlXED1czTSuX5jFmDG5Vj6424tycZvBDXUMIlrKRSbAfUnW9u5Hl6+mDD/6SIHH/UJVscpnki0qxJtqtsCMSstNzKN9brIVJKKvUH/P1xE5YHaARqoDk7g9rdsPpamDLJnNQ6cw78rzHb74nYEtQlqsAttIeSRYGLcxVK7EOCL+mGeY561TSGlpoVhiP1MpN2Hl6DGmZ14zOZZI1aOICyoxvbzOGBTTfHQTEKBYbnLyZTZCnUae+MwqYxc48Mdhh8nieM2xtpx5oONnpIaRUUTukICqbuwBv5WJ/P8AHDnLcvTMKeeFP9WNdaEvsV7g+WJMQz0lAlJChJ1F5YxfRqIsAfMgX3PmcRdHDTySSPG5ieM+IHZbdP16+2JA9gxdTxgsDRxU51TKB08x39cTtDl8k2UZjVxLHJFGUeSPVdi2oA7DtpJ3xF5DTPWZnBDF4+Y/L3WwYnp9vPD2qH7pzTNoKB2kpl1AKN0uW03NutiSAcLYm+IO/wDueTWzI+uZMnzOpfLomeGOdkhkfup3AtYX8JHXGAPX+J6FZJQtzyAwbqBuBe/UYmaSh/eeXKWV2Xotze0iDb8VNvtiMmj05hNFSAorI4BBvY/UB/8A5GMXIG0exPGriFJC1cTSU4lb5hty4uyW/Uf741pcqkiHNrAIyR4Vbr+HngphpTNnNDmckcdPRUa0ytBFtsE3F7WPiFzfcg4jMyRMxqaifkTjLqV0jM0y8vQWubkepBt12tjVy311NKkDUYZdLT0dY0rxLMgQqkTAr4r9fP8A5xs+YiU2dUWOTcaVuoI26H2wybMVa5SNVX+UsLk4yeWOJWBieOeNwWRhbSD/AIMM4m7MDdR+a+qjhVaVpYgw+tjb7LbYYXGfZkhjR66KWPdpFAVrqBcggjcWHfucRvMSaBkjUrKUJVgbXxGwowinYXuUAFx5sP8AbHlxg9iFQk1mtEKSqVJW0RSeKKyhV3/v0xvlmTtJVK3PSSzXPYj0OGkdRU19NFRursYFeV9TXLL9XfyF/wAcF+TUFNWT08tRI0ck0aBgADuo03b3sDc9b4IWomqplycEcPR18UEtfKl
gAqknVsBt+A264vDhzhtKOl1xAyixYd29h/tirOCaSmoaRaahZpFRdK3W+/f88XbkFYlTFCkZ5aMS8hB2VFO+48zt7XwIYXLB1JjKqSCpmSnVCtQYBO8bqQUUmw1eRuCLeh8sTicMwPcTHXfcC22G3C4DXrJlMdTmzGdAw6RIAI1+yENbzc4OI4o441ubsRho+oXPAyvsz+GOX5rGQ0KKT1JF8AOffs6pmClKSvjhkLE3eG/bobdR3/vjohYRYHpjcU4fT4fGdul8AVuMBnGOefsw8WwUrnKq/La17EojM0ZHoCQfT/fFQ8Q8G8S8J/w+JcqloVmawkB1AkDzW4x9Oo6EOANCkqLb/niD4m4NyzPaCamrqKGeKQeJWS4+3374HhPanzWpOIMxy6kiipaluWCSV87HBBQfEKoEPLqYkdiNmtY9wcEXxj+Ddd8Ocw+boC1Xw/NJpilI8UBJJCP+gbv74qGrqYKe5Y6SviAv1uf/AFgeFwOREsSs+IkdNHTl0Mjuo1gHpvt+mBfNePedIGgjTRcgC/UeeAKfMXldg58RuVX+kedu2GjVS38XhA2G2PDAs96pk/VcS5hU8xmlaIOCAUHTEJqYsWd3Zw2rUd8e1dSirHHqFkjXbyv4j+uGfzYjcbgjzwxVFagFoW5HXGKULexbp74NKLM2TljVsQSfXFYZdWjX2FsGLZhEYqE8sQ3pwupe5BIJPqdr4Uy0RGK8LqSponq+ZFQU7VDHxStCL38ztcnErmfDOTZvTqaykinnYeGSwDA+wttivYs2kE4CX1J9LX3wT5PmtQtSj1R+kXsTjL49Q9NJ3JvhPwqYAtZla627CZz+pw+zL9n3hmqpXNGk+XykXVxKXX7g9sTeVV8UzozbKwve+C6gzCLMSkSozwRnc/1n19P19up+oZnpj7TnyX9m7PkpppMsqqGq135buWXw+gtsT5nt088VVnHD9bwzU1FHnFMaeuBKaG7Du336D7474nlkp1UUlpXkGy/0juxHkP8A1gT4syfhjiPKZaTiDkS7HQzgGdG/qBHiuTv5YIZANExTYx7ThlIucQFXUxNgB3Plh1yqKm8FQjzyD6ikwVQfIbG/vg/zfg6HKFlCSip5bNFHJflkx36tsSDuV2xCDKTb+BJQhO1oyfzKk40ZA/UTxMRWpqedSM6m1SjtBo3DnXpb8wfyxFTqgrJCkPIeZmdoybgDe9z5Wufvg3paWPLKNIHUu1AgeGQbbMPH03JuL/hgXo4HkmquZAUeWPxqyf6MeoaRb1IufQDzxEuUEtXQiysn+Hctgy2eN45W5xjEES6fqdyzM48gEAG/cnDJ8tApnEDLHzHEvIlkvLIm5Um3Ym7W2tcYditNJMjq55gdWF9vFa1revf74bxU8sq1ldEhKUsgildhcXZWZRfy/XEasxYuT3X7/WbftUe0WWVVHl3PiljvAqvHHf6piwvYdyRrHsoxHcU00VLn+YyjQJWqC8axpqAUtck+pLfbE3lNZT0sEEtU0o5r+M2BJN7bfiR9zjJ6J80zvI45lEazyrDOgA3B/mvba46+2MXIy5bPRB/f6frCH8shsvXTLUkoXSWYEkttZQybj8MP86zKbMJqKlm0RZfFQtBGkaD6dFjq82+mx6jSMOVrknllpVXUsRZYGX6YmW+wPmdye1ziOqcomeGolhsAUJCH+c3uRYd9hv5Y0Nb22oOwKEGcmyT5hq0tMqmlTx37ox0Ej/8AK/2w7GTQlSJpA8pDRSsFJ+k7EeewG+N6N5Y+ZNLpUSqIplk2IvffpfridysVWbZvTxrCJ6daf5Z1WTaMgncDtivJmcEmYADqCP7lmhpJM0pH5tNHVclEt412BJYdhuov03xJ8PUcdVnqU9YiwxUcySSurFlazi6A+p2Hlvg1zOAFKihy8PeprDHTwqttRBjMkt/K0dtzbfEXm1BBR1lTDBB8pTSuAwVW1am3Zwx6+3rgU8lsqfMZxrYgnRQ8yuhMkXJkaKp
iqFTY6xq7edmUfbBHwtQSyVSyjUdYBNttiMP85pGNRFItOweOZQzWA1SGQ80jzDDQ33OHXDsAj5REckoVF1NYqRceW/44oTMG3PVRljcPVD0zaZiSrbEarsDvcj12/MYtPhLOY54P3VA38avqzCG86cXMjA+dtQ9CwxVyBKajSaEKJ7gJbfmKVIK2Pfp+uN+Fc2ENQMwjM38BmKBWAeO56LfysL9+uOa2cLkBH5Q7nXIqlizDJipt4pwFA2tyjt+QwRUVck0tr6iCQ3pY9Mc2T/FDMZGy96IrJPG8gZXshdSLHSN97bj/AGwdcD/EKkzNZ1RpUqElYtSuul1Gq2osdmufLHSTyUZqEMES9hKEiDqoexANz0F9yfbD6FNEms9+mA/Ls6kMsSlokD/yfUxHqen5YIIq0QOsLXZSp5XqB/L9v0xVY7hQhj6Gwxq4Gr+JutvET2GIOfiWkp6mii56K9Vuo1f0glv7Yk4qhKvY3RbizPsCfbrj3IGaIyz3g3K+JstqKLNYIqmnnj0vG4uCDjiD4zfsn55w7WVGacFpJmWStduTs0tMd77dWX1G+O95kgiURwyM7rYMBsBhJqeQrcSBbnzxoInitz49T5S+WVbU+ZI8btuQwKn0N/fGR5Oama8ahxDZjHe/MH9xb7239cfVPi/4QcLcf0zw8RZXT1EpuVnVAkqE9w43xyd8Tf2SuIcgllquCZBm1Gjao4GOmdFvf2a3pv6Yw2BYi6qcw5hlNJLUED+GJkVob7grpG1/MdO/TDROHY5IwIYrVCG1m8Sub7WB2Ht3xa78PuJ4qDO6KahmDcuOSpjaFRf/AMSLncXtYX388I1GRyZaoWRKaojY6Y5YLENcgXDX2t5G1r4h9cqtHR+Z6gRKtrMrkpauRKcEEtZYlNio8z/thzWUuZR5PEwictAsbDYjZrhgPOx0H74sA5QaymqRVRFKn6YGiQnx3UAn/wDId7/nh1X0c+WRwUqFaimhY81wPqQDQFB876yT22wtvKUATOMqzLqyqSQLUxyqQbXCkgj0O4wXUmZPBOgk2JuulhpO3n+OJaGgppal4a6jIVTpSVAHAktcM3c9vL8sO4OGqSnqIlkjZjLcLZdXLN76yb23PfyHrj2TyEA5QhYEIIc1pKem5aNKZIjyyAwuz+Snpbt+OCXI8zqqCserqlRZbmKIIA4UhRsBfT0Nrm57AYC0y2nEXOSapDvKUQJCfEbE6t9iDY9+xw9NXJRROsRKIl1c8hjrY3FtJtpboPIi2OefIJNmGOR2Ye1FfFUzSSy1s0RkVbvVFkE1xYHSvgXcEC5/PfDkZhDQUU/ycdOEmTcrHaRmAv8AVfv0ucV4lZUyyt84GSjmjaaN5CbmMGwG24G2xPcemEc5zmasjgmjjEsZ7wKbkWINztvsb9bYMZSD8zbjavyk1iiFbVCmJCpU7kFb73279fQdcRf/AExQKSJpYxIPqAHQ/fEhHWVdJHyqWROWqoC8imQqpuRa246AX6Y3k4RmzcJXmlWtNSuvXTwB1G5FjqNwdunrhwz3u6nlUt/KLg+MlzKaspXosrnp6WrXQjsmlQTcjSD9I7j0wxz/AIdrslq54pFjhnmQPUTmoVjJa2wAJIvbE09fKuQ1lUizq0VXJGp5jNrNwtgD5Db7YYcR0MkOXx5hWmCGNSlMXkBDmTRrew72Jt/xiHC5Z6J+PzP7MzgNwffLIaqMSTpK8YkvzlOmzAGxF9iL7W7DE/8Au1n4aighj5UE8pnnlTud1Uj7H88RmQZXW8YZxHT0Ec1YinxDoFQW3/zzxaL8OU8OUzVDV4OYQvyZKPQOUynbl7eSi9vM4LyXGPiCep4Y72JVVRk1UYQFheKClQa5GNkjW5Iv31E2wvlxlaphqRMY9E0cjvb6yCb2+9tsFlTwlmSsKKspqqEy2cRousvHYm4UdSoP23xlCj5jmVNk9PSGCGGVY6Zzp1Pbrq9ASSSemB9XkNwPSrUF6Vpsxr3ZKSO
JKaN+UFjA5Sk/UT3JJN2a5N8N4IeRUygQ+JCHLhSXYEXAB9Cb2xalIuX5RPBSZNVJWR08jJWVkwULUSdCVW3QXsL998CCs8U8s1BFGVjLXMviuBtb79DjF8hMjsB0IRQfeRAy6PNz8tJTGeo8Ca1UhiHJ0+4vf2vgmThGlynN+ZQ5ikEdBCPAgLfPzvcOVPYLcbnbb1xIZZLLSQVGZmKNJ/BBTMtv4Tb3YeekE2HmcMmyzMFlhp85jlpI6iBXhDNpsBfRYdbXte474E5SxIQ66nqUdCQdSJJZatFmenhS8OtBcu4Ooj2vt+GHNHQZlUxsmb1NTOUkVRDpud7eYsPffD2mpUjmjoaiWOllmlI8bEsXsdz/AJ3xM0tbFV5MsDLIJxWu8gQ2ugQDqe5N/wAcYuT0koTyiRE2SfOV8FJTH5pIFMShTZnOrdRbuT0tvt5Y0i4ezGGctV060yK76YpgxNw9iTpO3Qge2JqmrpuFad8xptD1cqnlS2voU7Fh31C1gT74ypq6OBqNCJpZZ7LKUlLhh1AFrE9dybXvgUzMjfSL/e4QQGJVOXaMleSvpkhgjm061Zhp9/I39e+IekrLs9cJCBK+sEbB2Nz06Xtf8sEdFBJPR1YqCtRQSl4mjqSbuL37bXG3TuTfGlXleYvAI6GKKKAHwMyhNLbatK7A38I87C3QYU2YE1cZ6SsCRGNTLV5hT04SFQ8swkRkUtoAtp3/AA74d5BR1+VcQzVc1dJT0usgM9wpF9Qsb7sSBt5A4jKsV9NHetrH5crIWCLfcf026eWJLimWKuiybJcpgano8upRIZ2BlZ53sXkA6bXVQT0tthuF3Dd1FKt3csnKPjNJl1RK+Y86osjKgSDlqTqG4a9unbB8fi7S1KU7RySBgrTqrjQ1wuwBO2/vjm3NaaGhpkFRVaklcrZrlrC3QACxJufLriMNROmk2YqlyoUllO91P/3bdsWfxuUip467nRdb8RYv+rMiqqh1YJBWDlr9MGqO67eY39b4v/hKvqM7ghzKrVoo2UGCnYjUgP8AO/8A5ny/lHrfHz0jz98onXNZEWeKWdFZCN0jDguF7WsAMd5fC3PqXP8AIojlDwi6hwuoXIPTYYu8bIWJZupi7uWlTJBOrc5tKi1/M4kqWDL5hZ5CW6DUbYFnqRTqEaxlLWtfpfuThnnbTjLQYZhE8Uga/Zh/xi4sRsRoUNqEmZRx0stqWQsAPfEWuaxzLyqsWLXCv6+RxlJOjwQr4uxBO9x74bVtLA9PJGfAxOpST3wQY1YnqHRgN8R8gouJsqnoc0gilDKeTI6Asjdt8ct51wPTwcPxVrzkyxc1K/kR2AlQ+HbuCNj3uL747DzPk1dANbqNQKG53Djtjk74gcRQcL8S5nTzLHJk+eRGCsibqkw25g9QbH2xz/MQOoYdzVAB+qV/liyZTmMlVTzsxpFdwRcFlKGxvvtvsfNcQ5pKrMZictmgeBUfUXl0HSuonb1PX1OE1euy+orMonkCSLOqxv1uo36dwbdB54m6jMKXLq6VuZGVEoDpTIqsRe4QXHiv39fTHLChf3+/vN4Kx3oRLKcrrKbK5Mxr6eEUYqOSYFNpXlZSQBb+XbcjYb480SPK6LC6tqKusYAC2Nupsdr/AEnb9ceTZppWRrtHEZmlanZlsj2AUXPQ29euJKlqoP3FTyZdQTvU1U3N54RmZgD9NztYEfnibIxF/JqDQGhIekp+fkwjnDlZ5hqbUUKAE9N9jex8t7d9pOjkEU0kNXUTSUnMeN9D3WDUSLA+Qv5bb43aFMwzCdp0hEQIenpYruEa++oAEAb+vltbEzRZFFSU9HIuURZZDJOxmWdgjEE2Lbk9idtx5WwRyqAdw61owSqxPSzSZb+8JIXlh/iMb6Y4grAKvoQL7dz6Y0ocqkeOqaf+LRhQIJYZizFdNxc/rtc7dcEGYSwQ5j8zCqVqRFhDfYGxuAOlwLm19t8DiZj8r84s8bzQtLoVF8JUk7H
b1t1vhi5OY/OKJ1ub5ZzZamZlRkQwXgWWLSXIvqIFrg2DWHQ4VeVXctROTCbWsCtj3H1DC9DmMlHYTwgSRUspXnG5BYaVHck7nGsELZgrzxypGhYhVvptb0GNOQ9rMHKqUwgyrhT5fhxamsHOqZXqKhYzZi7uT1HYbqB7Yn6r4ZUkWS5VScXpHXGeKSplKklllLam027WFjjKd52gizCSBWSJ2cjfpawAGFOJM3rMnpqSmqJpHqIKIsjC/hVtyCepNz0x8kvk5+RYaN/ruVpxC2RG+TcB01TRR/I8SHJMoijEMc2gxMi672J6nWNX4XwU5dwpwrDSUdOnEgy+oSVpIYoaZJlle9/ET4vc27YA+G4q/Nqalr86Somy9WKllawMSjqR/nfFl5Fw5wjlnzmYzZ7U/Ps6yxCONTHbSbRnvptfYWxS2XI2Uoxuv6/5hKB9qEGsxo3OX8xnpp9VK0S1aFmKgEixHXp4rd9sCyVVHmdNrpIY0zSZuTUmyoV8NuYoHmLXtg7+VmyyCOnyiSDMDLO7J4NPhKE6WVvW2BrKaSpolzCqz6jo6aokj0RaogGFtwBbtjEyn0mLH8oLX7yHybgOCrZ8ry9poswgmEkk8Sq8UY7atRHucNOG/hnmXEccs9dmVPDBzGjREGp5LHrc2Ci+998FlFTf9M0T11VqqayvGtoCwW1rncX74hs0g4hqsyhkrlZKVo1cQUzeEI38h09PfDcXkvR47+5P9oHFANiP8+p6H4d5DFRZclKM2VedBXq4laPS1ix1bG5PYDFXCXN85RqrMJJ66sqTy2ma+y3ve/QE3Fh5A4OqqjoJ80mnqKQVJWMKgncWjIJJ8Ppt164Tr81y/MZpIjrZABGYXumr1su3UYrxZyo4qLJ2TAZS2hGua5NBmNLlWfVcQNTSQR0uZR06aQZUI0yepKgAkdTiQz+dOH6mtjyeApGJOc3hAL6twL9gAd/XDqmElDQ1B5I59VOgRQQLIfpBviL4odaeCngkliaSZwwLi6NHewF8e5M7izqeKMLgvm0lfmNFFSvqVVZqglUIF26W89u2MDylYaOkm5PzQ1zyFbWUCwt+H44XqqrNZZBTx/xam4ZUDBEsvTr0HTfEjR5bmk1HT1VTBBzJpN9EwZlUA9B33xY1KBcEKe/eDVfmlRTPFl1LJaOlbQgBsLE3LW7knfB2jR1mXUbiZVZBdUMtyttmv79sMMp4co5XqazNzXUlDLeOl5oUPI3crYDa/wDfHrvw9SF6GiyqZ6OCUtNMJW3YD+ZupOJ/IplKqN+8duqkfmNFlA0tVZnOjqNIGknmWPi0gdD64msg4aOZZfFJltbUCKna7iRLlkN7Am/QeeIWegqsudYYqeb5GqQvC918R6nf2ODfgGpi+QqcwrLKkcZLQ9OZqGnRbyAN7emFZc/DCrTFVe6kHxjldJz0yiimjjqZCJ2Ldb/0j0tcbYEsxzFIIqbJ6CNqSkTVGum7O5JuXv5E3xYVbkEmV1Gb1ahKvMKaU0lC0ig8xWXUSPKy/ngSy166CB5HCSrIOVBeK7JqJGkdyb32xR4+UuOJ9v6wSDyK9SCoIPmXaiLR7AaAEDBl7i3T1xY3wp4jzDgLM9UKyR5Xp5bMx0G99hp7Dytgcl4Sk4TGX1FSsd6tGmpY2Ymyf1E2FhfoMRuUUdRmKVFRmdTLCslQDPJqJ0xqb7eV72vipctN31ACFTTdzsHKfi9kme5mKNKuIKjAzIjX+nfY+eJ+Hi+j4kq/3UivPqWQq0Q2bbwEH1J+2OPctoaaizPmZZDG+XzORA7XB5gFrOfX88S/DPHlVwzmMVbS1TfvakqSHRt45IyALW9xis+YbqNQ8D9Qnd75O2U5fTRR+JkhVWBNze2K/wCMc4zClg0Q5fUVR1i5iW+nfe/lgX4Z+PK8Z5vlVPmdM+Q18yuqFZeZHI4W42IvvY7YMOLfinluT8N11PKIo80K2jjjN+ZY3LDvsATY46ePKmRCQdRZYg3
K0zfP80onzETUk0sZPMWEGzggdTfobjFC/ECop+KP+4mppIqxAeZA+m5262NxjbPvjNmmZ8RZ1mWT1aihgVpY9dw0gvY29cCNf8QafiCGkety/XM5PNmkusiN5hh9Vu6nEvqAqR3U0nkLMc8L1Jg4lhqaipjmERKmEoBzG03PuQo7d8RmXLHmdfJDIzwATu0JnJtG5B0bHtfr5bYQoc5FRVVEaxRQpCBpqUh2Jbpde+HcDvR1QFa1PBA6iTU5Jsv/AIDf1G2OYUJJqYhBbczNIJZqtaWWop2lE7FwEsWPToB/l8Eaxx02XJlU+Yy12yycqCP/APjPY+PX0I6AgeeG1VlzU+YrDlEK1uZwohfXGV5rdRubqTY2I/HBPwZlMFQKkcTmThlw0kmkRgtI17AdbAb/AJYRk6HI/v8ArGrhHLuRJCUZpC6/K0VPUqsyQlVeRLX/ANTr59P6sFtHNS5hRmmXhv5ZGjdY6moqJZWLBbqLkgbkgdO+J9Mky+OnjrVv+6Av8NaikA5jWsNIUnYkX33v6YlKXIJsxp6isi5Jn06VVpdK6AQQLdb9N8crL5AApfaN9JhdQAreGhUU8Pzaq9BLrM0ity5EIG1xazi426e+AfM4K+Bpar5W+WxMCJyPFcbeIdh6jFomerOZ5hleZxvRoQqs0iWZjc7ggeEXO2G7w0FNVvQz1BzZ4hr5ESFF6bXc/V5Ww/DlyX9fUz0wRd1BaLh6mz7KzmdcZaqSKFdIQAc0qSNOnqdRt+Hrh5lfwyz3Nad3ossSSKGQwghiouvUAel7fbDpKmleCppqyOmVA6vy73UXNyDa3fBbPxbmEKwDKo3NO0QYBQoCnfbc4audgeI/6nsaIRvuDuQU1VPkeWZipesiWNedGn/1G+/QdMNs3zuRJKw5sYnqlVZVppfpUntfvfsMZjMcbEoNj/8AR/rNbSmNqfid86yaaCMPl8rqYoafT4bjrYeWGfDeXRnNYKWrV4wq8xWWU6mdbHceXXGYzBvSF1UV3/eK3QMIL1UuY0tZmJUxvVvbSdJTey/liTg+ezQV8TvDmlXEB8pDsoS17WY9T/tjMZiTEeQAPz/WGPqYA/vUZT8JZXDlYzriynkNaCqwrHMwMjG+zdh1wnxZmElFkyVGWTLQwpGIwIRcB7bX8xfGYzFD5GV8fya/WMYAKTBFcozSvgFdPJCalHASRTcTNYEhl9cOHkpkYGqy+qyyVZeZzob6S+5swtYX3xmMx1Vclqia4NqHeTimf5aj4lyDLqo16FmrpNTlR0Sw6AgdcDX7oqqvjGVaChSuy6BwiQyr/DAA6b9jjMZiA5W9Pl7xjfWgv7zfijhlaXLubDSRR17FkanhkusMZJNlJ/vjzgjhBjPFNl9WaymXS0qVAs6eXuL4zGYNcznAT8xNU0nuKuDnqqiPMc+zKmENECsMaMRoDdBftYfrgAzemgeHTkb1FdBTyjm06OCCtt2tYXH54zGY6XjjkpuVAAEmN6iup0y6GVowk1LZZFJIV1Gwtf07YmMgy6aeCqhgpngiiKVABa/MLHcX8wMZjMS+UePjMR7f5ElstowuzrKqWjqZq9YJ5GRkVVEhsZmILMQfww4g4eo63N+QJUpap5A2keLl2BJYKereuMxmJcBLGyZ0sONWAJ+3+YU51wcnEmVUlbn0vPkaFYKZYxoLqgIG3bpitM0ymnyujWnpagQaWK1aOba7HZScZjMMGRzkIvowcwBe4NVL0op4aCCSSIJL8wUiFgOguL+mFpqahyzPMxiy+inzCVYw8U5ewt2A9d8ZjMdGzo/EkChluMJ69oqmKeeqenrBIJUSEm6ODYW9cS2dZpnfEkbpV0Eysw0tPpAYkdx733xmMxev/wAxBCgxrnXC+T5XFllJPG8VdJTlJDGngJI/m62wCVPCkMM0EAkkid59KaASBrPU3xmMwjA7EA32P8zc+NQdSczz5HJ8zqMtSLRBAgHq3QE/554lcoypaeLLXzNZGpJ
QJIJta2sbkAAjqdtsZjMLZ2UKfv8A4iF25uK09WtJVVJy6okp6oOCzvclr9w98L0WcTVeZQTZvUc1I1IZFjuJOw1eZxmMxh+oG44MSKhZBnFVmdQrSCOFmAQoq6AI1H1Eeew3w/TPaSmq0aprJ68Q2VRGdIsOl/Q4zGYkPj4yLjj/ACXGPEnEnMeesmkkFRUOrTKF8N/5bdyMC1Jn9PT1RhqYFkmLMTpYgE2uLnzxmMw1MCVJ8h3ca0WWVudZhM9yqz7qWSyrvsD5YfVuWvFVzIKtp9LWJjnsAfLGYzE4as5HxPJjVhZn/9k=",
"text/plain": [
- ""
+ "'\\n\\nid: `chatcmpl-xxx`\\n- model: `claude-sonnet-4-20250514`\\n- finish_reason: `stop`\\n- usage: `Usage(completion_tokens=34, prompt_tokens=10, total_tokens=44, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\\n\\n '"
]
},
"execution_count": null,
- "metadata": {
- "image/jpeg": {
- "width": 200
- }
- },
+ "metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "fn = Path('samples/puppy.jpg')\n",
- "Image(filename=fn, width=200)"
+ "cl.details"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "60bb0ee7",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{\n",
- " \"role\": \"user\",\n",
- " \"content\": [\n",
- " {\n",
- " \"type\": \"text\",\n",
- " \"text\": \"hey what in this image?\"\n",
- " },\n",
- " {\n",
- " \"type\": \"image_url\",\n",
- " \"image_url\": \"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4gxUSU...\n"
- ]
- }
- ],
- "source": [
- "msg = mk_msg(['hey what in this image?',fn.read_bytes()])\n",
- "print(json.dumps(msg,indent=1)[:200]+\"...\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "ba0bee2d",
+ "id": "ce79ea79",
"metadata": {},
+ "outputs": [],
"source": [
- "Which can be passed to litellm's `completion` function like this:"
+ "#| export\n",
+ "def _content(resp): \n",
+ " msgc = _msg(resp).content\n",
+ " return msgc[0]['text'] if isinstance(msgc,list) else msgc\n",
+ "\n",
+ "@patch(as_prop=True)\n",
+ "def content(self:litellm.ModelResponse): return _content(self)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "6fc53ae1",
+ "id": "82e60e9e",
"metadata": {},
"outputs": [],
"source": [
- "model = ms[1]"
+ "@patch\n",
+ "def _repr_markdown_(self: litellm.ModelResponse):\n",
+ " content = self.content if self.content else str(self.msg) # If no content we will take the other Message info\n",
+ " return f'{content}\\n\\n{self.details}'"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "b9ebb192",
+ "id": "27b1c685",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "This image shows an adorable puppy with distinctive brown and white markings on its face. The puppy appears to be a small breed, possibly a Cavalier King Charles Spaniel or similar breed, with fluffy reddish-brown and white fur. The puppy is positioned near some purple flowers (which look like small daisies or asters) and is sitting on grass. The setting appears to be outdoors in a garden area, creating a sweet, natural portrait of this very cute young dog. The puppy has dark eyes and the classic innocent, gentle expression that makes puppies so endearing.\n",
+ "Hello! It's nice to meet you. How are you doing today? Is there anything I can help you with or would you like to chat about?\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=131, prompt_tokens=104, total_tokens=235, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "- usage: `Usage(completion_tokens=34, prompt_tokens=10, total_tokens=44, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='This image shows an adorable puppy with distinctive brown and white markings on its face. The puppy appears to be a small breed, possibly a Cavalier King Charles Spaniel or similar breed, with fluffy reddish-brown and white fur. The puppy is positioned near some purple flowers (which look like small daisies or asters) and is sitting on grass. The setting appears to be outdoors in a garden area, creating a sweet, natural portrait of this very cute young dog. The puppy has dark eyes and the classic innocent, gentle expression that makes puppies so endearing.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=131, prompt_tokens=104, total_tokens=235, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Hello! It's nice to meet you. How are you doing today? Is there anything I can help you with or would you like to chat about?\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=34, prompt_tokens=10, total_tokens=44, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -586,63 +482,85 @@
}
],
"source": [
- "completion(model,[msg])"
+ "cl"
]
},
{
"cell_type": "markdown",
- "id": "90426580",
+ "id": "40369304",
"metadata": {},
"source": [
- "Now lets make it easy to provide entire conversations:"
+ "### Tool Calls"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d222b4fb",
+ "metadata": {},
+ "source": [
+ "Some messages might have tool calls. Let's mock the whole tool flow so we understand the messages!\n",
+ "All we need to do is make a msg with role as assistant and provide an additional list of tool calls.\n",
+ "\n",
+ "Let's first make the tool call objects:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "75fa16cc",
+ "id": "0e9734a1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def simple_add(\n",
+ " a: int, # first operand\n",
+ " b: int=0 # second operand\n",
+ ") -> int:\n",
+ " \"Add two numbers together\"\n",
+ " return a + b"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "cd4712ac",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def mk_msgs(msgs, # List of messages (each: str, bytes, list, or dict w 'role' and 'content' fields)\n",
- " cache=False, # Enable Anthropic caching\n",
- " ttl=None, # Cache TTL: '5m' (default) or '1h'\n",
- " cache_last_ckpt_only=True # Only cache the last message\n",
- " ):\n",
- " \"Create a list of LiteLLM compatible messages.\"\n",
- " if not msgs: return []\n",
- " if not isinstance(msgs, list): msgs = [msgs]\n",
- " res,role = [],'user'\n",
- " for m in msgs:\n",
- " res.append(msg:=mk_msg(m, role=role,cache=cache))\n",
- " role = 'assistant' if msg['role'] in ('user','function', 'tool') else 'user'\n",
- " if cache_last_ckpt_only: res = [_remove_cache_ckpts(m) for m in res]\n",
- " if res and cache: res[-1] = _add_cache_control(res[-1], cache=cache, ttl=ttl)\n",
- " return res"
+ "def random_tool_id():\n",
+ " \"Generate a random tool ID with 'toolu_' prefix\"\n",
+ " random_part = ''.join(random.choices(string.ascii_letters + string.digits, k=25))\n",
+ " return f'toolu_{random_part}'"
]
},
{
- "cell_type": "markdown",
- "id": "6b4624e2",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "32deb93a",
"metadata": {},
+ "outputs": [],
"source": [
- "With `mk_msgs` you can easily provide a whole conversation:"
+ "#| export\n",
+ "def mk_tc(func, idx=1, **kwargs):\n",
+ " args = json.dumps(kwargs)\n",
+ " if callable(func): func = func.__name__\n",
+ " id = random_tool_id()\n",
+ " return {'index': idx, 'function': {'arguments': args, 'name': func}, 'id': id, 'type': 'function'}"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "263ecc52",
+ "id": "a94807c1",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "[{'role': 'user', 'content': 'Hey!'},\n",
- " {'role': 'assistant', 'content': 'Hi there!'},\n",
- " {'role': 'user', 'content': 'How are you?'},\n",
- " {'role': 'assistant', 'content': \"I'm doing fine and you?\"}]"
+ "{'index': 1,\n",
+ " 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'},\n",
+ " 'id': 'toolu_FHtWbytX0gdOFCRODSrpCNAWc',\n",
+ " 'type': 'function'}"
]
},
"execution_count": null,
@@ -651,34 +569,47 @@
}
],
"source": [
- "msgs = mk_msgs(['Hey!',\"Hi there!\",\"How are you?\",\"I'm doing fine and you?\"])\n",
- "msgs"
+ "tc = mk_tc(simple_add, a=5, b=7)\n",
+ "tc"
]
},
{
"cell_type": "markdown",
- "id": "910677d4",
+ "id": "f7673693",
"metadata": {},
"source": [
- "Who's speaking at when is automatically inferred.\n",
- "Even when there are multiple tools being called in parallel (which LiteLLM supports!)."
+ "Now we can package this nicely into a Message"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "9cf8d1f2",
+ "id": "553e6825",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def mk_msg(content, role='user', tcs=None):\n",
+ " msg = {'content': content, 'role': 'assistant' if tcs else role}\n",
+ " if tcs: msg.update({'tool_calls': tcs, 'function_call': None}) # function_call is legacy and is given here explicitly to be safe\n",
+ " return msg"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b4833887",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "[{'role': 'user', 'content': 'Tell me the weather in Paris and Rome'},\n",
- " {'role': 'assistant', 'content': 'Assistant calls weather tool two times'},\n",
- " {'role': 'tool', 'content': 'Weather in Paris is ...'},\n",
- " {'role': 'tool', 'content': 'Weather in Rome is ...'},\n",
- " {'role': 'assistant', 'content': 'Assistant returns weather'},\n",
- " {'role': 'user', 'content': 'Thanks!'}]"
+ "{'content': \"I'll use the simple_add tool to calculate 5 + 7 for you.\",\n",
+ " 'role': 'assistant',\n",
+ " 'tool_calls': [{'index': 1,\n",
+ " 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'},\n",
+ " 'id': 'toolu_FHtWbytX0gdOFCRODSrpCNAWc',\n",
+ " 'type': 'function'}],\n",
+ " 'function_call': None}"
]
},
"execution_count": null,
@@ -687,44 +618,48 @@
}
],
"source": [
- "msgs = mk_msgs(['Tell me the weather in Paris and Rome',\n",
- " 'Assistant calls weather tool two times',\n",
- " {'role':'tool','content':'Weather in Paris is ...'},\n",
- " {'role':'tool','content':'Weather in Rome is ...'},\n",
- " 'Assistant returns weather',\n",
- " 'Thanks!'])\n",
- "msgs"
+ "tc_cts = \"I'll use the simple_add tool to calculate 5 + 7 for you.\"\n",
+ "tc_msg = mk_msg(tc_cts, tcs=[tc])\n",
+ "tc_msg"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "d6f3bf25",
+ "id": "944d9e64",
"metadata": {},
"outputs": [],
"source": [
- "#| hide\n",
- "test_eq([m['role'] for m in msgs],['user','assistant','tool','tool','assistant','user'])"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "475862e9",
- "metadata": {},
- "source": [
- "For ease of use, if `msgs` is not already in a `list`, it will automatically be wrapped inside one. This way you can pass a single prompt into `mk_msgs` and get back a LiteLLM compatible msg history."
+ "#| export\n",
+ "def _wrap_choice(msg, finish_reason='stop', index=0):\n",
+ " return {'finish_reason': finish_reason, 'index': index, 'message': msg}\n",
+ "\n",
+ "def mk_msg_resp(content, role='assistant', tcs=None, model='test-model'):\n",
+ " choice = _wrap_choice(mk_msg(content, role, tcs), 'tool_calls' if tcs else 'stop')\n",
+ " return {'id': 'chatcmpl-xxx', 'created': 1000000000, 'model': model, 'choices': [choice]}"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "aa4e7663",
+ "id": "93681363",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "[{'role': 'user', 'content': 'Hey'}]"
+ "{'id': 'chatcmpl-xxx',\n",
+ " 'created': 1000000000,\n",
+ " 'model': 'test-model',\n",
+ " 'choices': [{'finish_reason': 'tool_calls',\n",
+ " 'index': 0,\n",
+ " 'message': {'content': \"I'll use the simple_add tool to calculate 5 + 7 for you.\",\n",
+ " 'role': 'assistant',\n",
+ " 'tool_calls': [{'index': 1,\n",
+ " 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'},\n",
+ " 'id': 'toolu_FHtWbytX0gdOFCRODSrpCNAWc',\n",
+ " 'type': 'function'}],\n",
+ " 'function_call': None}}]}"
]
},
"execution_count": null,
@@ -733,20 +668,50 @@
}
],
"source": [
- "msgs = mk_msgs(\"Hey\")\n",
- "msgs"
+ "tc_resp = mk_msg_resp(tc_cts, tcs=[tc])\n",
+ "tc_resp"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f46a7d4f",
+ "metadata": {},
+ "source": [
+ "This isn't enough: we need to provide the tool call result, which will just be a simple dictionary and will be provided to the completion model in the list just following the tool call."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "bea8c2b0",
+ "id": "8ba4872f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def mk_tc_result(tc, result): return {'tool_call_id': tc['id'], 'role': 'tool', 'name': tc['function']['name'], 'content': result}"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8430fb42",
+ "metadata": {},
+ "source": [
+ "Note we might have more than one tool call if more than one was passed in; here we will just make one result."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "338d2833",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "[{'role': 'tool', 'content': 'fake tool result'}]"
+ "{'index': 1,\n",
+ " 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'},\n",
+ " 'id': 'toolu_FHtWbytX0gdOFCRODSrpCNAWc',\n",
+ " 'type': 'function'}"
]
},
"execution_count": null,
@@ -755,24 +720,22 @@
}
],
"source": [
- "#| hide\n",
- "msgs = mk_msgs({'role':'tool','content':'fake tool result'})\n",
- "msgs"
+ "_msg(tc_resp)['tool_calls'][0]"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "2c828056",
+ "id": "4ebcbac7",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "[{'role': 'user', 'content': 'Hey!'},\n",
- " {'role': 'assistant', 'content': 'Hi there!'},\n",
- " {'role': 'user', 'content': 'How are you?'},\n",
- " {'role': 'assistant', 'content': \"I'm fine, you?\"}]"
+ "{'tool_call_id': 'toolu_FHtWbytX0gdOFCRODSrpCNAWc',\n",
+ " 'role': 'tool',\n",
+ " 'name': 'simple_add',\n",
+ " 'content': '12'}"
]
},
"execution_count": null,
@@ -781,142 +744,208 @@
}
],
"source": [
- "msgs = mk_msgs(['Hey!',\"Hi there!\",\"How are you?\",\"I'm fine, you?\"])\n",
- "msgs"
+ "tcr = mk_tc_result(_msg(tc_resp)['tool_calls'][0], '12')\n",
+ "tcr"
]
},
{
"cell_type": "markdown",
- "id": "284fbfc3",
+ "id": "c45ed893",
"metadata": {},
"source": [
- "However, beware that if you use `mk_msgs` for a single message, consisting of multiple parts.\n",
- "Then you should be explicit, and make sure to wrap those multiple messages in two lists:\n",
- "\n",
- "1. One list to show that they belong together in one message (the inner list).\n",
- "2. Another, because mk_msgs expects a list of multiple messages (the outer list).\n",
- "\n",
- "This is common when working with images for example:"
+ "Looks good so far! Now we will want to provide the actual result!"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "d315d964",
+ "id": "7d877827",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def mk_tc_results(tc_resp, results): return [mk_tc_result(a,b) for a,b in zip(_msg(tc_resp)['tool_calls'], results)]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f61d908d",
+ "metadata": {},
+ "source": [
+ "Same here: tc_resp's tool_calls will match the number of results passed in the results list."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3fe8c7bb",
"metadata": {},
"outputs": [
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "[\n",
- " {\n",
- " \"role\": \"user\",\n",
- " \"content\": [\n",
- " {\n",
- " \"type\": \"text\",\n",
- " \"text\": \"Whats in this img?\"\n",
- " },\n",
- " {\n",
- " \"type\": \"image_url\",\n",
- " \"image_url\": \"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD...\n"
- ]
+ "data": {
+ "text/plain": [
+ "[{'tool_call_id': 'toolu_FHtWbytX0gdOFCRODSrpCNAWc',\n",
+ " 'role': 'tool',\n",
+ " 'name': 'simple_add',\n",
+ " 'content': '12'}]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
- "msgs = mk_msgs([['Whats in this img?',fn.read_bytes()]])\n",
- "print(json.dumps(msgs,indent=1)[:200]+\"...\")"
+ "tcr = mk_tc_results(tc_resp, ['12'])\n",
+ "tcr"
]
},
{
- "cell_type": "markdown",
- "id": "bad470e4",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8cc70607",
"metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{'content': 'Please calculate 5+7. Please use tool calls to calculate. Please explain your result', 'role': 'user'}\n",
+ "\n",
+ "{'content': 'I will calculate 5 + 7 for you', 'role': 'assistant', 'tool_calls': [{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_NU4I5vol0UfZTDCWZIGxsWiG1', 'type': 'function'}], 'function_call': None}\n",
+ "\n",
+ "{'tool_call_id': 'toolu_NU4I5vol0UfZTDCWZIGxsWiG1', 'role': 'tool', 'name': 'simple_add', 'content': '13'}\n",
+ "\n"
+ ]
+ }
+ ],
"source": [
- "## Streaming"
+ "m = mk_msg('Please calculate 5+7. Please use tool calls to calculate. Please explain your result')\n",
+ "tc_resp = mk_msg_resp('I will calculate 5 + 7 for you', tcs=[mk_tc(simple_add, a=5, b=7)])\n",
+ "tcr = mk_tc_results(tc_resp, ['13']) # Lets give it a wrong answer to ensure our synthetic messages are working\n",
+ "inpt = [m, _msg(tc_resp), *tcr]\n",
+ "for i in inpt: print(i, end='\\n\\n')"
]
},
{
"cell_type": "markdown",
- "id": "eec3dc5d",
+ "id": "2a83daa3",
"metadata": {},
"source": [
- "LiteLLM supports streaming responses. That's really useful if you want to show intermediate results, instead of having to wait until the whole response is finished.\n",
- "\n",
- "We create this helper function that returns the entire response at the end of the stream. This is useful when you want to store the whole response somewhere after having displayed the intermediate results."
+ "Even though we don't expect completions to call simple_add given it already has a response, we still need to pass the function to\n",
+ "completions. In order to do this we have to serialize our `simple_add` function to a dictionary that is parsable by litellm.\n",
+ "This is made possible by toolslm's [get_schema](https://github.com/AnswerDotAI/toolslm/blob/main/toolslm/funccall.py#L118) function."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "9ad6fc2c",
+ "id": "57389377",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def stream_with_complete(gen, postproc=noop):\n",
- " \"Extend streaming response chunks with the complete response\"\n",
- " chunks = []\n",
- " for chunk in gen:\n",
- " chunks.append(chunk)\n",
- " yield chunk\n",
- " postproc(chunks)\n",
- " return stream_chunk_builder(chunks)"
+ "def lite_mk_func(f): return f if isinstance(f, dict) else {'type':'function', 'function':get_schema(f, pname='parameters')}"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "46f16571",
+ "id": "d4e10f7d",
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'type': 'function',\n",
+ " 'function': {'name': 'simple_add',\n",
+ " 'description': 'Add two numbers together\\n\\nReturns:\\n- type: integer',\n",
+ " 'parameters': {'type': 'object',\n",
+ " 'properties': {'a': {'type': 'integer', 'description': 'first operand'},\n",
+ " 'b': {'type': 'integer', 'description': 'second operand', 'default': 0}},\n",
+ " 'required': ['a']}}}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "r = completion(model, mk_msgs(\"Hey!\"), stream=True)\n",
- "r2 = SaveReturn(stream_with_complete(r))"
+ "lite_mk_func(simple_add)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "9797136b",
+ "id": "372456fc",
"metadata": {},
"outputs": [
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Hello! How are you doing today? Is there anything I can help you with?"
- ]
+ "data": {
+ "text/markdown": [
+ "The calculation is complete! Using the simple_add function, I added 5 + 7 and got the result of **13**.\n",
+ "\n",
+ "**Explanation of the result:**\n",
+ "- The function took the first number (a = 5) and the second number (b = 7)\n",
+ "- It performed the addition operation: 5 + 7\n",
+ "- The mathematical result is 13, which is the correct sum of these two integers\n",
+ "\n",
+ "This is a straightforward arithmetic operation where we're combining two positive integers to get their total value.\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=117, prompt_tokens=531, total_tokens=648, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "\n",
+ " "
+ ],
+ "text/plain": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"The calculation is complete! Using the simple_add function, I added 5 + 7 and got the result of **13**.\\n\\n**Explanation of the result:**\\n- The function took the first number (a = 5) and the second number (b = 7)\\n- It performed the addition operation: 5 + 7\\n- The mathematical result is 13, which is the correct sum of these two integers\\n\\nThis is a straightforward arithmetic operation where we're combining two positive integers to get their total value.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=117, prompt_tokens=531, total_tokens=648, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
- "for o in r2:\n",
- " cts = o.choices[0].delta.content\n",
- " if cts: print(cts, end='')"
+ "res = completion(model, inpt, tools=[lite_mk_func(simple_add)])\n",
+ "res"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "dc30d165",
+ "metadata": {},
+ "source": [
+ "Perfect. Now let's try calling completion on only the first message input."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "897ec073",
+ "id": "f17053b7",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "Hello! How are you doing today? Is there anything I can help you with?\n",
+ "I'll calculate 5 + 7 for you using the addition function.\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=20, prompt_tokens=9, total_tokens=29, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
+ "- finish_reason: `tool_calls`\n",
+ "- usage: `Usage(completion_tokens=88, prompt_tokens=425, total_tokens=513, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How are you doing today? Is there anything I can help you with?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=20, prompt_tokens=9, total_tokens=29, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll calculate 5 + 7 for you using the addition function.\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=1, function=Function(arguments='{\"a\": 5, \"b\": 7}', name='simple_add'), id='toolu_01Bu4558oDBhKjcXuheRw57M', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=88, prompt_tokens=425, total_tokens=513, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -925,61 +954,71 @@
}
],
"source": [
- "r2.value"
+ "res = completion(model, [mk_msg('Please calculate 5 + 7')], tools=[lite_mk_func(simple_add)])\n",
+ "res"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b62f60f1",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll calculate 5 + 7 for you using the addition function.\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=1, function=Function(arguments='{\"a\": 5, \"b\": 7}', name='simple_add'), id='toolu_01Bu4558oDBhKjcXuheRw57M', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=88, prompt_tokens=425, total_tokens=513, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(res)"
]
},
{
"cell_type": "markdown",
- "id": "bd8e11c0",
+ "id": "de806794",
"metadata": {},
"source": [
- "## Tools"
+ "### Add tools repr"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "4301402e",
+ "cell_type": "markdown",
+ "id": "e52dcf5f",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "def lite_mk_func(f):\n",
- " if isinstance(f, dict): return f\n",
- " return {'type':'function', 'function':get_schema(f, pname='parameters')}"
+ "It would be nice to see what tool calls we used in the output as well."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "7b103600",
+ "id": "8bea4e50",
"metadata": {},
"outputs": [],
"source": [
- "def simple_add(\n",
- " a: int, # first operand\n",
- " b: int=0 # second operand\n",
- ") -> int:\n",
- " \"Add two numbers together\"\n",
- " return a + b"
+ "#| export\n",
+ "def _show_tools(resp):\n",
+ " tool_calls = _msg(resp)['tool_calls'] if _msg(resp) else None\n",
+ " if not tool_calls: return ''\n",
+ " return \"\\n\".join([f\"\\n\\n🔧 {tc['function']['name']}({tc['function']['arguments']})\\n\" for tc in tool_calls])\n",
+ "\n",
+ "@patch(as_prop=True)\n",
+ "def show_tools(self:litellm.ModelResponse): return _show_tools(self)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "100fc27b",
+ "id": "f9c1beda",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "{'type': 'function',\n",
- " 'function': {'name': 'simple_add',\n",
- " 'description': 'Add two numbers together\\n\\nReturns:\\n- type: integer',\n",
- " 'parameters': {'type': 'object',\n",
- " 'properties': {'a': {'type': 'integer', 'description': 'first operand'},\n",
- " 'b': {'type': 'integer', 'description': 'second operand', 'default': 0}},\n",
- " 'required': ['a']}}}"
+ "'\\n\\n🔧 simple_add({\"a\": 5, \"b\": 7})\\n'"
]
},
"execution_count": null,
@@ -988,183 +1027,345 @@
}
],
"source": [
- "toolsc = lite_mk_func(simple_add)\n",
- "toolsc"
+ "res.show_tools"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "8c2af29a",
+ "id": "3097f0ea",
"metadata": {},
"outputs": [],
"source": [
- "tmsg = mk_msg(\"What is 5478954793+547982745? How about 5479749754+9875438979? Always use tools for calculations, and describe what you'll do before using a tool. Where multiple tool calls are required, do them in a single response where possible.\")\n",
- "r = completion(model, [tmsg], tools=[toolsc])"
+ "#| export\n",
+ "@patch\n",
+ "def _repr_markdown_(self: litellm.ModelResponse):\n",
+ " content = self.content if self.content else str(self.msg) # If no content we will take the complete Message info\n",
+ " return f'{content+self.show_tools}\\n\\n{self.details}'"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "8bdb1817",
+ "id": "f5366aee",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "I'll help you calculate both of those addition problems using the simple_add tool. Let me perform both calculations for you:\n",
- "\n",
- "🔧 simple_add({\"a\": 5478954793, \"b\": 547982745})\n",
- "\n",
- "\n",
+ "I'll calculate 5 + 7 for you using the addition function.\n",
"\n",
- "🔧 simple_add({\"a\": 5479749754, \"b\": 9875438979})\n",
+ "🔧 simple_add({\"a\": 5, \"b\": 7})\n",
"\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
- "- usage: `Usage(completion_tokens=160, prompt_tokens=475, total_tokens=635, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "- usage: `Usage(completion_tokens=88, prompt_tokens=425, total_tokens=513, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll help you calculate both of those addition problems using the simple_add tool. Let me perform both calculations for you:\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=1, function=Function(arguments='{\"a\": 5478954793, \"b\": 547982745}', name='simple_add'), id='toolu_013MZMqJL4fBRGTsMtAGJjMk', type='function'), ChatCompletionMessageToolCall(index=2, function=Function(arguments='{\"a\": 5479749754, \"b\": 9875438979}', name='simple_add'), id='toolu_01HkbM4zwAb38n4rH7SNvi75', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=160, prompt_tokens=475, total_tokens=635, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll calculate 5 + 7 for you using the addition function.\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=1, function=Function(arguments='{\"a\": 5, \"b\": 7}', name='simple_add'), id='toolu_01Bu4558oDBhKjcXuheRw57M', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=88, prompt_tokens=425, total_tokens=513, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
+ "execution_count": null,
"metadata": {},
- "output_type": "display_data"
+ "output_type": "execute_result"
}
],
"source": [
- "display(r)"
+ "res"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "c4d81d05",
+ "cell_type": "markdown",
+ "id": "aecbdd30",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "def _lite_call_func(tc,ns,raise_on_err=True):\n",
- " try: fargs = json.loads(tc.function.arguments)\n",
- " except Exception as e: raise ValueError(f\"Failed to parse function arguments: {tc.function.arguments}\") from e\n",
- " res = call_func(tc.function.name, fargs,ns=ns)\n",
- " return {\"tool_call_id\": tc.id, \"role\": \"tool\", \"name\": tc.function.name, \"content\": str(res)}"
+ "## Running different models"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "972300a0",
+ "metadata": {},
+ "source": [
+ "Lets try this out on a few different models!"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "d5af607c",
+ "id": "920d0288",
"metadata": {},
"outputs": [
{
"data": {
+ "text/markdown": [
+ "**gemini/gemini-2.5-flash:**"
+ ],
"text/plain": [
- "[{'tool_call_id': 'toolu_013MZMqJL4fBRGTsMtAGJjMk',\n",
- " 'role': 'tool',\n",
- " 'name': 'simple_add',\n",
- " 'content': '6026937538'},\n",
- " {'tool_call_id': 'toolu_01HkbM4zwAb38n4rH7SNvi75',\n",
- " 'role': 'tool',\n",
- " 'name': 'simple_add',\n",
- " 'content': '15355188733'}]"
+ ""
]
},
- "execution_count": null,
"metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "tcs = [_lite_call_func(o, ns=globals()) for o in r.choices[0].message.tool_calls]\n",
- "tcs"
- ]
- },
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/markdown": [
+ "Hey there yourself! How can I help you today?\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `gemini-2.5-flash`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=113, prompt_tokens=4, total_tokens=117, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=102, rejected_prediction_tokens=None, text_tokens=11), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=4, image_tokens=None))`\n",
+ "\n",
+ " "
+ ],
+ "text/plain": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='gemini-2.5-flash', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hey there yourself! How can I help you today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=113, prompt_tokens=4, total_tokens=117, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=102, rejected_prediction_tokens=None, text_tokens=11), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=4, image_tokens=None)), vertex_ai_grounding_metadata=[], vertex_ai_url_context_metadata=[], vertex_ai_safety_results=[], vertex_ai_citation_metadata=[])"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/markdown": [
+ "**claude-sonnet-4-20250514:**"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/markdown": [
+ "Hello! Nice to meet you! How are you doing today?\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=16, prompt_tokens=10, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "\n",
+ " "
+ ],
+ "text/plain": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! Nice to meet you! How are you doing today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=16, prompt_tokens=10, total_tokens=26, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/markdown": [
+ "**openai/gpt-4.1:**"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/markdown": [
+ "Hello! How can I help you today? 😊\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `gpt-4.1-2025-04-14`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=10, prompt_tokens=10, total_tokens=20, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None))`\n",
+ "\n",
+ " "
+ ],
+ "text/plain": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='gpt-4.1-2025-04-14', object='chat.completion', system_fingerprint='fp_daf5fcc80a', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How can I help you today? 😊', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=10, total_tokens=20, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "msg = [{'role':'user','content':'Hey there!', 'cache_control': {'type': 'ephemeral'}}]\n",
+ "for m in ms:\n",
+ " display(Markdown(f'**{m}:**'))\n",
+ " display(completion(m,msg))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "715fff5a",
+ "metadata": {},
+ "source": [
+ "## Messages formatting"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5671bcb0",
+ "metadata": {},
+ "source": [
+ "Let's start with making it easier to pass messages into litellm's `completion` function (including images)."
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
- "id": "3dd9871b",
+ "id": "f46ee6b1",
"metadata": {},
"outputs": [],
"source": [
- "def delta_text(msg):\n",
- " \"Extract printable content from streaming delta, return None if nothing to print\"\n",
- " c = msg.choices[0]\n",
- " if not c: return c\n",
- " if not hasattr(c,'delta'): return None #f'{c}'\n",
- " delta = c.delta\n",
- " if delta.content: return delta.content\n",
- " if delta.tool_calls:\n",
- " res = ''.join(f\"🔧 {tc.function.name}\" for tc in delta.tool_calls if tc.id and tc.function.name)\n",
- " if res: return f'\\n{res}\\n'\n",
- " if hasattr(delta,'reasoning_content'): return '🧠' if delta.reasoning_content else '\\n\\n'\n",
- " return None"
+ "#| export\n",
+ "def _mk_img(data:bytes)->tuple:\n",
+ " \"Convert image bytes to a base64 encoded image\"\n",
+ " img = base64.b64encode(data).decode(\"utf-8\")\n",
+ " mtype = mimetypes.types_map[\".\"+imghdr.what(None, h=data)]\n",
+ " return img, mtype"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "e4790a51",
+ "id": "ecb67a0e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def _is_img(data): \n",
+ " return isinstance(data, bytes) and bool(imghdr.what(None, data))\n",
+ "\n",
+ "def _add_cache_control(msg, # LiteLLM formatted msg\n",
+ " cache=False, # Enable Anthropic caching\n",
+ " ttl=None): # Cache TTL: '5m' (default) or '1h'\n",
+ " \"cache `msg` with default time-to-live (ttl) of 5minutes ('5m'), but can be set to '1h'.\"\n",
+ " if not cache: return msg\n",
+ " if isinstance(msg[\"content\"], str): \n",
+ " msg[\"content\"] = [{\"type\": \"text\", \"text\": msg[\"content\"]}]\n",
+ " cache_control = {\"type\": \"ephemeral\"}\n",
+ " if ttl is not None: cache_control[\"ttl\"] = ttl\n",
+ " if isinstance(msg[\"content\"], list) and msg[\"content\"]:\n",
+ " msg[\"content\"][-1][\"cache_control\"] = cache_control\n",
+ " return msg\n",
+ "\n",
+ "def _remove_cache_ckpts(msg):\n",
+ " \"remove unnecessary cache checkpoints.\"\n",
+ " if isinstance(msg[\"content\"], list) and msg[\"content\"]:\n",
+ " msg[\"content\"][-1].pop('cache_control', None)\n",
+ " return msg\n",
+ "\n",
+ "def _mk_content(o):\n",
+ " if isinstance(o, str): return {'type':'text','text':o.strip() or '.'}\n",
+ " if _is_img(o): \n",
+ " img, mtype = _mk_img(o)\n",
+ " return {\"type\": \"image_url\", \"image_url\": f\"data:{mtype};base64,{img}\"}\n",
+ " return o\n",
+ "\n",
+ "def mk_msg(content, # Content: str, bytes (image), list of mixed content, or dict w 'role' and 'content' fields\n",
+ " role=\"user\", # Message role if content isn't already a dict/Message\n",
+ " cache=False, # Enable Anthropic caching\n",
+ " ttl=None): # Cache TTL: '5m' (default) or '1h'\n",
+ " \"Create a LiteLLM compatible message.\"\n",
+ " if isinstance(content, dict) or isinstance(content, Message): return content\n",
+ " if isinstance(content, list) and len(content) == 1 and isinstance(content[0], str): c = content[0]\n",
+ " elif isinstance(content, list): c = [_mk_content(o) for o in content]\n",
+ " else: c = content\n",
+ " return _add_cache_control({\"role\": role, \"content\": c}, cache=cache, ttl=ttl)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "da39f96f",
+ "metadata": {},
+ "source": [
+ "Now we can use mk_msg to create different types of messages:"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "85199113",
+ "metadata": {},
+ "source": [
+ "Simple text:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f6217f4b",
"metadata": {},
"outputs": [
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "I'll help you calculate both of those sums using the addition tool. Let me perform both calculations for you:\n",
- "\n",
- "1. First, I'll calculate 5478954793 + 547982745\n",
- "2. Then, I'll calculate 5479749754 + 9875438979\n",
- "🔧 simple_add\n",
- "\n",
- "🔧 simple_add\n"
- ]
+ "data": {
+ "text/plain": [
+ "{'role': 'user', 'content': 'hey'}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
- "r = completion(messages=[tmsg], model=model, stream=True, tools=[toolsc])\n",
- "r2 = SaveReturn(stream_with_complete(r))\n",
- "for o in r2: print(delta_text(o) or '', end='')"
+ "msg = mk_msg(\"hey\")\n",
+ "msg"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "552e1298",
+ "metadata": {},
+ "source": [
+ "Lists w just one string element are flattened for conciseness:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "5b21118c",
+ "id": "c1a0955a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "test_eq(mk_msg(\"hey\"), mk_msg([\"hey\"]))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "60a3230e",
+ "metadata": {},
+ "source": [
+ "With Anthropic caching:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "1d4ef534",
"metadata": {},
"outputs": [
{
"data": {
- "text/markdown": [
- "I'll help you calculate both of those sums using the addition tool. Let me perform both calculations for you:\n",
- "\n",
- "1. First, I'll calculate 5478954793 + 547982745\n",
- "2. Then, I'll calculate 5479749754 + 9875438979\n",
- "\n",
- "🔧 simple_add({\"a\": 5478954793, \"b\": 547982745})\n",
- "\n",
- "\n",
- "\n",
- "🔧 simple_add({\"a\": 5479749754, \"b\": 9875438979})\n",
- "\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `tool_calls`\n",
- "- usage: `Usage(completion_tokens=197, prompt_tokens=475, total_tokens=672, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
- "\n",
- " "
- ],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll help you calculate both of those sums using the addition tool. Let me perform both calculations for you:\\n\\n1. First, I'll calculate 5478954793 + 547982745\\n2. Then, I'll calculate 5479749754 + 9875438979\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"a\": 5478954793, \"b\": 547982745}', name='simple_add'), id='toolu_018E853fnQvQRskwHNSsPs8w', type='function'), ChatCompletionMessageToolCall(function=Function(arguments='{\"a\": 5479749754, \"b\": 9875438979}', name='simple_add'), id='toolu_01W3NFMfY5mSDdDVtgicRe4Q', type='function')], function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=197, prompt_tokens=475, total_tokens=672, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
+ "{'role': 'user',\n",
+ " 'content': [{'type': 'text',\n",
+ " 'text': \"hey I'm Rens. Please repeat it in all caps w a fun greeting\",\n",
+ " 'cache_control': {'type': 'ephemeral'}}]}"
]
},
"execution_count": null,
@@ -1173,92 +1374,121 @@
}
],
"source": [
- "r2.value"
+ "msg = mk_msg(\"hey I'm Rens. Please repeat it in all caps w a fun greeting\",cache=True)\n",
+ "msg"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "001e399e",
+ "metadata": {},
+ "source": [
+ "(LiteLLM ignores these fields when sent to other providers)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "fcb3164e",
+ "metadata": {},
+ "source": [
+ "Text and images:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2479f2d8",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gxUSUNDX1BST0ZJTEUAAQEAAAxEVUNDTQJAAABtbnRyUkdCIFhZWiAH0wAEAAQAAAAAAABhY3NwTVNGVAAAAABDQU5PWjAwOQAAAAAAAAAAAAAAAAAA9tYAAQAAAADTLUNBTk8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5yVFJDAAABLAAACAxnVFJDAAABLAAACAxiVFJDAAABLAAACAxyWFlaAAAJOAAAABRnWFlaAAAJTAAAABRiWFlaAAAJYAAAABRjaGFkAAAJdAAAACxjcHJ0AAAJoAAAAEBkbW5kAAAJ4AAAAHxkbWRkAAAKXAAAAJR3dHB0AAAK8AAAABR0ZWNoAAALBAAAAAxkZXNjAAAKXAAAAJR1Y21JAAALEAAAATRjdXJ2AAAAAAAABAAAAAAEAAkADgATABgAHQAiACcALAAxADYAOwBAAEUASgBPAFQAWQBeAGMAaABtAHIAdgB7AIAAhQCKAI8AlACZAJ4AowCoAK0AsgC3ALwAwQDGAMsA0ADVANoA3wDlAOoA8AD1APsBAQEGAQwBEgEYAR4BJAErATEBNwE+AUQBSwFSAVkBXwFmAW0BdQF8AYMBigGSAZkBoQGpAbABuAHAAcgB0AHYAeEB6QHxAfoCAgILAhQCHQImAi8COAJBAkoCUwJdAmYCcAJ6AoMCjQKXAqECrAK2AsACygLVAuAC6gL1AwADCwMWAyEDLAM3A0MDTgNaA2YDcQN9A4kDlQOhA60DugPGA9MD3wPsA/kEBgQTBCAELQQ6BEcEVQRiBHAEfgSMBJoEqAS2BMQE0gThBO8E/gUNBRsFKgU5BUgFWAVnBXYFhgWVBaUFtQXFBdUF5QX1BgUGFgYmBjcGSAZYBmkGegaLBp0Grga/BtEG4wb0BwYHGAcqBzwHTwdhB3MHhgeZB6sHvgfRB+QH+AgLCB4IMghFCFkIbQiBCJUIqQi+CNII5gj7CRAJJAk5CU4JZAl5CY4JpAm5Cc8J5Qn7ChEKJwo9ClMKagqACpcKrgrFCtwK8wsKCyELOQtQC2gLgAuYC7ALyAvgC/kMEQwqDEIMWwx0DI0MpgzADNkM8g0MDSYNQA1aDXQNjg2oDcMN3Q34DhMOLg5JDmQOfw6aDrYO0Q7tDwkPJQ9BD10PeQ+WD7IPzw/sEAkQJhBDEGAQfRCbELkQ1hD0ERIRMBFOEW0RixGqEcgR5xIGEiUSRBJkEoMSoxLCEuITAhMiE0ITYxODE6QTxBPlFAYUJxRIFGkUixSsFM4U8BURFTQVVhV4FZoVvRXfFgIWJRZIFmsWjxayFtUW+RcdF0EXZReJF60X0hf2GBsYQBhlGIoYrxjUGPoZHxlFGWsZkRm3Gd0aAxoqGlAadxqeGsUa7BsTGzsbYhuKG7Eb2RwBHCkcUhx6HKMcyxz0HR0dRh1vHZkdwh3sHhYePx5pHpMevh7oHxMfPR9oH5Mfvh/pIBUgQCBsIJcgwyDvIRshSCF0IaEhzSH6IiciVCKBIq8i3CMKIzcjZSOTI8Ij8CQeJE0kfCSqJNklCCU4JWcllyXGJfYmJiZWJoYmtybnJxgnSSd5J6on3CgNKD4ocCiiKNQpBik4KWopnSnPKgIqNSpoKpsqzisBKzUraSudK9EsBSw5LG0soizXLQstQC11Last4C4WLksugS63Lu0vIy9aL5Avxy/+MDUwbDCjMNoxEjFKMYExuTHxMioyYjKbMtMzDDNFM34ztzPxNCo0ZDSeNNg1EjVMNYc1wTX8Njc2cjatNug3JDdfN5s31zgTOE84jDjIOQU5QTl+Obs5+To2OnM6sTrvOy07azupO+c8JjxlPKQ84z0iPWE9oD3gPiA+YD6gPuA/ID9hP6E/4kAjQGRApUDnQShBakGsQe5CMEJyQrRC90M6Q31DwEQDREZEikT
NRRFFVUWZRd1GIkZmRqtG8Ec1R3pHv0gFSEpIkEjWSRxJYkmpSe9KNkp9SsRLC0tSS5pL4UwpTHFMuU0CTUpNkk3bTiRObU62TwBPSU+TT9xQJlBwULtRBVFQUZpR5VIwUnxSx1MSU15TqlP2VEJUjlTbVSdVdFXBVg5WW1apVvZXRFeSV+BYLlh8WMtZGlloWbdaB1pWWqVa9VtFW5Vb5Vw1XIVc1l0nXXddyV4aXmtevV8OX2BfsmAEYFdgqWD8YU9homH1Ykhim2LvY0Njl2PrZD9klGToZT1lkmXnZjxmkmbnZz1nk2fpaD9olWjsaUNpmWnwakhqn2r3a05rpmv+bFZsr20HbWBtuW4RbmtuxG8db3dv0XArcIVw33E6cZRx73JKcqVzAXNcc7h0E3RvdMx1KHWEdeF2Pnabdvh3VXezeBB4bnjMeSp5iHnnekV6pHsDe2J7wXwhfIF84H1AfaB+AX5hfsJ/I3+Ef+WARoCogQmBa4HNgi+CkYL0g1eDuYQchICE44VGhaqGDoZyhtaHOoefiASIaIjNiTOJmIn+imOKyYsvi5WL/IxijMmNMI2Xjf6OZo7NjzWPnZAFkG2Q1pE/kaeSEJJ5kuOTTJO2lCCUipT0lV6VyZYzlp6XCZd1l+CYTJi3mSOZj5n7mmia1ZtBm66cG5yJnPadZJ3SnkCerp8cn4uf+aBooNehRqG2oiWilaMFo3Wj5aRWpMalN6Wophmmi6b8p26n4KhSqMSpNqmpqhyqjqsCq3Wr6KxcrNCtRK24riyuoa8Vr4qv/7B0sOqxX7HVskuywbM3s660JLSbtRK1ibYBtni28Ldot+C4WLjRuUm5wro7urS7LbunvCG8mr0UvY++Cb6Evv6/eb/0wHDA68FnwePCX8Lbw1fD1MRRxM3FS8XIxkXGw8dBx7/IPci7yTrJuco4yrfLNsu1zDXMtc01zbXONc62zzfPuNA50LrRO9G90j/SwdND08XUSNTL1U7V0dZU1tjXW9ff2GPY59ls2fDaddr623/cBNyK3RDdlt4c3qLfKN+v4DbgveFE4cviU+La42Lj6uRz5PvlhOYN5pbnH+eo6DLovOlG6dDqWurl62/r+uyF7RDtnO4n7rPvP+/L8Fjw5PFx8f7yi/MZ86b0NPTC9VD13vZs9vv3ivgZ+Kj5N/nH+lf65/t3/Af8mP0o/bn+Sv7b/23//1hZWiAAAAAAAABvoAAAOPIAAAOPWFlaIAAAAAAAAGKWAAC3igAAGNpYWVogAAAAAAAAJKAAAA+FAAC2xHNmMzIAAAAAAAEMPwAABdz///MnAAAHkAAA/ZL///ui///9owAAA9wAAMBxdGV4dAAAAABDb3B5cmlnaHQgKGMpIDIwMDMsIENhbm9uIEluYy4gIEFsbCByaWdodHMgcmVzZXJ2ZWQuAAAAAGRlc2MAAAAAAAAAC0Nhbm9uIEluYy4AAAAAAAAAAAoAQwBhAG4AbwBuACAASQBuAGMALgAAC0Nhbm9uIEluYy4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABkZXNjAAAAAAAAABNzUkdCIHYxLjMxIChDYW5vbikAAAAAAAAAABIAcwBSAEcAQgAgAHYAMQAuADMAMQAgACgAQwBhAG4AbwBuACkAABNzUkdCIHYxLjMxIChDYW5vbikAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWFlaIAAAAAAAAPbWAAEAAAAA0y1zaWcgAAAAAENSVCB1Y21JQ1NJRwAAASgBCAAAAQgAAAEAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAVklUIEx
hYm9yYXRvcnkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAENJTkMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzVAABAAAAARbPAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAABQAAAAAAAEAAQAAAAAAAf/bAEMABAMDBAMDBAQDBAUEBAUGCgcGBgYGDQkKCAoPDRAQDw0PDhETGBQREhcSDg8VHBUXGRkbGxsQFB0fHRofGBobGv/bAEMBBAUFBgUGDAcHDBoRDxEaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGv/AABEIAMgBLAMBIgACEQEDEQH/xAAdAAABBAMBAQAAAAAAAAAAAAAGAwQFBwACCAEJ/8QAQhAAAgECBAQEBAMHAwQBAwUAAQIDBBEABRIhBhMxQSJRYXEHFDKBkaGxCBUjQlLB8DPR8RYkYuGCQ3KSCRg0U6L/xAAaAQADAQEBAQAAAAAAAAAAAAACAwQBBQAG/8QAMREAAgICAgEDAgQFBQEBAAAAAQIAEQMhEjEEE0FhIlEjcaHwFDKxwdFCgZHh8TNi/9oADAMBAAIRAxEAPwCtuF+JeC6SDNsygzareKiAu4kZGjuPoHS9yNididr4p74qfENuLpRSUVdVVeWU8xKySnwygjwtpYa1IuQQSRfcYr4SOheKOR9DoRKqGwbe9j54UpDTTVSxVQZKXWCxQXcLft54a3k/gjEihV+BGNlbJ2Y94WySHOc3io55DFG8bP4CNbkfyrfv+PthXiDJlyWqIiqObGygXZLFbW2I97flhtLTxQ1UlTltQwhhkC0+vZyLbHbv/vhLMcyrM2qWFdJLOyR6QWU3Fu59ffEf81EGCCApBG5GtqEpbWtyb++FquqeomvZRpGkW7+uGsWzXI2XfCmuI28LavfB0LuBZjyZ4mjjMSaSo8TEm5NulvTG9OGbTZb6+m+9/bDcVEaR20KzkX3X6fv3xYPw5pEkyvNMyraekrItaxurwGR0UeIk9lU7epsewxipyB+AT9zqexpzapC0fD1dW5LUZrDSLNSU7jVp1K+k7FwehCkgN3FwbW3xP5/8Oc3yKanWn15k8omYpTxF25UZALsPI3Htt3xa+V0wi4cC08XIQXSOmZDrcHcjTa2k6j1seuCLI6PMsypav5RJKCoI5C1OyOoB+lS4IDC43sd+nni/H4+JuCFWtxfXVdg/Yyr+HAB3OWKj5f8Af1PJVUjVUYZJKiLUVdgDYr128vTFhycHZfxrmT1OQUByOnAZOVG/MTmAdLXJB3U9dwGt0xD1fw/zKPibMstnf93yRSgCWpOtmVm+slRudwTt+eHPCua1fAMwir4m+UqQC00NjoFzbUCtydrgXBsduuIkw8GDZAaFxY+zDUY8QcD1mRSV1KZoaxcupYJ5nRSulZugt2NyL/lgZSBYAWmUBgRp0tvqOOmaeXI+IuD8zBq8rrZatGrSJZXhEukaQ8nQnSwC7nw7DyJ5eliZSyKSUDX1Hz8sZmxcFVqrluu6+LEHKoQ/TuT2U5/meUUscVBWTJSrzf4BN1OtQrgqbixsNvQHqL4e5dNV5jmaTu80swTxSu2skkWuSdybWGGL5JVZZl9DXVQpZ4KxLqsc2pkJ/wD7FG6mxv62w6yWjmirI3hvDN9cXgK+1z5nfYYk58iFY2PzhICDRhpQrkWTy/MZvTPVo8RQJKTpJPU36Bhbp64hqyVY6WSSNWSkMn/bM6/y/wBJttcd8TVCK2qhNDUPI1HGplSIMBy5CbA3PVf6u+BvMcylqqgUEhhJefSwF+XfVYEX9D1HW2MxY2V2yve9fH9qP3qWkg4wK/zH/DFRSpWPU11TyVRbRNYAFjtY37e
2E+JqiD971Bgm542u2nTvbfDdYKnJ6QVFVlzrDN/28mtPDuuqyn/7WBv2JxEmllIaUROkQJ2JvYdgT9xvhysQhVj79fb/ANg0eIAkNnEoaQC/fEeV5inG+biWGtKTqUNgwB8j0whFJpRi2DXqLyG2mfLKoDNhKblqLL1xo9QSdugwizFgScMiYi9rm2HWULetX2w0OH+Si9WT5Ljx/lMwfzSXmO5HntthC76yF2uML1A7oNhvhESBH2323FsSGVDubxxCNDdmsbat74cIpN9N1Vh4Tq3OGqTeBA67A36YerOx0WW1rNYCwwlrjkqbGBjpZnKA32J3IwsaZFiR+cFJYlrE9LYSjlkcuzW3/lAtcY30c6RWIOg9rd8K3HCo8hhhsHeUyWsVBWy/c3xJ01DTz6I4njEhYF21MAl+3r74iaepMSOCU5bC7XBJ69vXDzL61OZqq1eQX1SG9t+wH5bYS11HJV7hKI5qWKalzFVEqkLHK0ZugHcE7m9xiKqKKQFV1JVSNtrAYFd/LzxI09ZV1yOtLIsskcYjsNmcX73PbCGYMaeeSB2TmI3idUA3HWx8xiZNHfcobYsdSPePTYNPA8khJBMTMQR239sNGleV3b/ti2ohjyDuQcPkrpgjRpIF+rSzDbzONI5YIkUEAORdrN1Pn6YaDUTVwNajpI6OMxpJ80d2bXZT+ONqfK2qFjpYKaRq47gmULqF+tmta3nhWlekkp+dLJNFKEvDy1DaWB737bYkqziuo4kMUmdlagwOFja+kjb02N7dMdS2F1Pn1VSNmRVPltdSVAM0UUQhY/6pDLcd+/2Iw0gqDUVUrVtS9OtrNMgLFu1j33wmk8gjsJHILWAPYb48XVbxbqDY9rjDADu4LMAKWIiAVEjLCgjDsqrv6279ziRzXhwZM1OKqVC0l9cam5QC1zfoe/4YQlmgdH0agvVreEgennho9VPVyhFMkkrOAutrkb2AwY5E66grXE33JjKuDc3zL5iopctqaqkpjaSVF2H+4tvthPJ86rch1fJTN8s0gdoyLq5B6keeCT/quu4cjqYsonJDSPZHAAjtsGF/Yf4MAkuoqJJWJdiSRbvffCsD5y7FqrVVd/NxjBFUFCb94ccPfErMslrK6qEMVRLVSiUF92TxhiinqEa1jbe3lg7rPjHxDPM9BRUyU8tZDHNVLLHZIJdZJK3uGRoyFIPXa24wC/Dykoq6SZI3nknZQainaj1x2B8LK4N0YHoTbc2sQcWFmvD4zp5uTIlJmb6Qz7KbKCOWVuPxPTttjpN5D4UVPUouaH5x2JXyAm48oKf96CjrWWiindHaDXULHNUxqvi1KSey3BJHQDDHiKiymegdc3knpIle9pA0aCS22twrWNiNjbFd55S8QcDV8/zcaU8uYQPDExUM4Q2uV8iel8GGdZ/nVbQPnlPSw09QtItDmFI6NoqYiAqBomAJlRrm/wD5LpuMFgxLjFHlaggi7Bvd/n/bUY+fkCtfpKoinVJY9UjuobZQbEjv7X/y+H1RBG8cM0c2oPcNEPqS36389seUmTs0zU9Q0lNWxy8hoOUdYbyN7WANr98aZjltZlTQx1sckLSoW8SadgSNj3Fx1GOcyNdyAKQt1qFXDtMmaUMlFT/KwzBlcTsTrBHWxHYA3097YfHK63JIvnoa5JqpY1MgdrvpdR4SQfC1j0/4AZRZjNlxL0cjJ4dDjURrB7G2Cavno80y6TM8slgy6q5apVUp25yjbw72NjbYgbEb4DHiBJrRG7v7e3/H2lSOCN+0IshzakylJRTSyzGVv4EjDSULC7E9b+La3pjQUtPV5s+Y5lO88shBVCFBA+2wP6YHChraGhShkl5hJ17BQo3vYefTfBVlmTpAEaotGFFhrJ29lG59ycT+R5RbGMa6H5To+Nj5m2Fw+yqvpqmIRSwWUWsHZSG/92GJV8sy+oheOSlQKwtp0jcehHtgOpocucCzszdjyyP7nElzkoU8FQ4QdAx/tjh
ZCQbud7GAdVIziv4Z0vETs9O4o6lnDGRU1a7CwB9PbAr/APt/zmojkaHNMtjRbW5sjLcfh1wcx8Scl7NJ4T3viVp+JYyLc4D0tbDcPm58QobEVm8HDlNnRgRkf7NkWYqFzHi+lpJjtpipHlA+5IGLX4b/AGGsjzQxms43rpEIuwgy+Nb+xLH9Dh5w3BNns6LEguzKVcCxBuQT9v7jHUPAXCE2XxQSvIQNjpHbzGOz4ufNlP1CcnyfHw4h9JlGj/8AT+4DNOB/1BxHzdP+pzICCfPTy/yvgEzD9gPNsrM03DvGVDWm3girKF4Sf/mrMB+GO/UgAQAja2GksOkkWuOmOmdipzABc+V3FH7OPxL4WDvWcL1NdTq1jNlzrVr+CHUPuMVfX5bWZRVy0uaUs9DUx/XBPE0br/8AFgDj7H1VExRxHcb3BHbHM/xm4SlztHpOKMoizywLU55JEqi+3LdbMnfv23xJkpBcoxr6hoGcAQFQSty3ffthzy9R1Bh0sCfLB1nXw+gpM05eVVBINx8tPKNaHuNQ+seosR388EdH8PKWoypovCk5Xa/Y9r+vUG3XED50Xdy3H42Q2KlYRUzxDmrd0XdxboMec4EFtAJFgpvi9M44fyah4cai5emELd5F2bbqb+Z6YpzMMuanqW5cfIi7ISCbHp97YSmUZLIjXwnH3GskkXiEaEra4t2J9cPqSoRI0aa2m5GkISbj2+2GkdCqxo7SOFJIZLA7X69cSFLAs/8ABvy+rqxTt064I1UFY+o6iGQPrtFbSWHiO57bG9sO66Coo2C1DKqFNeo7kL+N/wAcR0uTIoD8y0W13WPYgffD9Kaasp1pBoc72kMJBUX6YUR7iMB9o0dYZI+YFN1BGpbAknsD3wyFRBQloqhhI+okkIHtftfDk5TJH4hGXiVtLFQ2kDp+vlhSny7LzHeoZAxP88ljbA8gO5oUmVtdo4boQTv5AAXx5azKskiJyxzBcXGrsNvS2NpomlZIpCqEMQ3Tc3wznlBkeygAenXHeAufL1FZphG76Bfc2GPYYJq0yclGblrdjq7Y3jozUTFNSq2m/W32PriVyGhooczRc/ephoZFdS1LYszdgR5bG+AZgikjuEuMncgw4QaL9fxAwpSymKsQqQhS5Ut2Nuv26++Cmm4AzDMK2gFFyxR5nHLLTVMouQiMQVYDo9rbeuHj/DDNaf5yeR4Y6WOaWCmmlBvUsnWyC5CgXux2FsUMjLjLkamrjcnQgxFFDVCX595VhPiQxKC1+gvfthJ5FWkjUcmQJcKoQ3A9b4ThR1kYzMVK9QO1u/5Y2pq8RLNrQSMwKgWtf++E9dTAb0ZOcMca5pwxpipZWSheXnSQoAu5sCwI6vYGxOy3uBfFpU/xkyCrjqFq4ZaSAwQrGBDeQTHVzGVh/SdHXqPwxSLSRJyxStolt4ypP3vhCoSFHRi5IKAgAeeK8HlZMQpZoYpJvPuIDW54k+VSSRw08oliZAYwjXB1BCdKNcb6bKSLgC9sXyGkp5KesraepikqlR4pKuNA4cX8RVbqG3uO18c000EuZ1KQU6qsjtYK7BV9yT0xanDHGFdkNTl0ObzVZWlVlldKpeZMHYBSjyAqqKEA3HcnBI4IIdytkGx8H+kdhYqSalu5F8PssqpaOMNN/FqmqMzhqYlabWQTrVzvo9t97emBD4xfDhKDKIK7KJKqtahOmRjECml236bkrsSwFgCAd8WdkfEaZsMxgyHMMvnlomQVwVmlpixBOkVGlVLC3VR374kKPP5ctqWphK9PPMGGtJRddQtqXazEe2x7Y6nmv4mLHzyro0oYC+z8e1jfzK1xeqpVTOKqmOrJBaJmjsAjBbi32xKUGW1FQ2p0a/1G9gCf7DB5xvl2W8N8SSQ5fCtLTNEpCJPJL4wSG1Ft9RO59+mBw1bSAgNyl8yT+mPmPI54WOMjqFi8VG2T/tHuXzR5XpEQ507G1k/QYnYKZ6phJVVGhT11NtfyAHXA3l7
8ybl0zc2Ug6nIsFGJmgiiec8yQTOpAYt0Hp/6HXHLce862IUKENsuWmy+IchDJIR1ZbfliJzyYVU2ssFb+rVsPxwtWSGKApAq6EFnYnTqPrbt6YAs0zMIzFIqdbGwYktv6euI0RsjalxZca7krUVjQbGujla9rCPb8Ri0fhrw0+dIkuZVaQU7NZWWHVY+uroD/bFE5dHV5lnVNSs2uTUHcDYRj28/THZvw5kyimoIo3pI1qTHZ0dbiRfT/NsXekq0D3IfWZrI6EsTgLhlcrr4oa9EYq2kOqAAhha+wHcC/li/MvhWKNSg0g21D/y88VvwzNTPDGv8gUKl9yo7b+mLFoZwVAc72tjr+OAi6nHzsXazJiwI364bhfEQbA74wTBiL9xj1X1am7nFFycTyWNVTp2vgG4zRpqGeGljEkzLtftv54NKyQCLTqsxHXA1m0ixxGOO3TxN3/HAnYM1dGcVccZLR5RmMsUgR5w51DQNCHyB7n9PfAquaU6NpCqGHSxub++Lw+LEOWutSDMxqRYOYjpWK/RQe7Hy3vjlnMKxqbNHijYizdH3NvQ9/wAMfNeRgtjRn0vjZ/p3DqWSKZQ0/wDEC7qna/rgD4koFlnec0kxksbMguPwxMNnRESgIgIFrmQg4bGqqPrWQhP6iLr+uEYuWMx2UB5VtRFVQs6xwyAarXYbX9sbfMSEpFLBIzDoYxYdcWRVr83E9mjWYjwSgXF/XzxDCOvpgSaqIaFCguNF/uf0746mN1yDrc5OTG+M96gvFmkVPIoho5KmNTqbWLG52sLYcSZ8ZXjC0eiEbWK2J+/tgkT52UQiJ4GdDqkj0B1vfuexwuGzPW7PErwQOpSOOEXAHWxwwoD7RIYj3goM45mnnQCI72207jzPlhxTZsksZMrwxkGwVKYsAPe+CWalrDS6onjnKklLwA3U9eovcHr6YUpp6qZC1OBpBteYAMT5207DywsqK1GhjdGVBVsnPkkp/FcGwtax6YbxOI5GlYgsG/h7X3v1PoMb0TAuEtq1baffCU0dpXVbBAbCx8sdVRWp89dkmbUupqidy7FgvXuSdv1wtTsTXx8pGciRVCLcsx8u+5OPYKadKaWpjjfS7BY7kX69fx298NkpWjqDCsjRzBwLFSrBr+XUG+PLRJM9sTqulpjR0KVr5PU5Ssr8ySlqoxEUl02LCx/m7na+xth9BkUXGeVmnmqJaGCSMpL8u38TSTdkU6TYMbXI3IFvPFH8GfE3MskzPJaWeZpssjqgtXCzgmdNR3dyL7E3+2LG4m+OVPwzVtQ8Iw2rYp3p6pYtBhmUfRNGwBGrsRYjvjs48qvl9ZshCVRQgd/e5eMqcKlHcY8Pw8M8UVOXQ1lRWRBv9SWmeEkEnazgE2/qtY9sQfKEbNYi8a7m/c/4cO8xzeu4gzE1Obzz1taZSXmllaR2BJIG/YX2AsMLNkGcT0r18WW1RoF1HmiI6fD9R/ztfyOOWw5ueA1ICLOpDyPy9v6upBw8dKRsqVkEprg1lN/BywT+e+GtfSVFHUPFUxtHKrbqw3Hf77EH74cBpBBAoa0hH4Dz9AMAwqeGhNKFGhJqJNS6D4durYdTTU9TE7TNLLUki2tj0/36YcZjRxw0cMkVW05k/rNlYdyDfrfrfzx7kq5XDWqOJzUNSlG8NPIA2q23nthYaxcZxZWo+8fcM8X5xw1A9NR1WnLmqFnkp5Yg8Usi2IunUjwrtextvi3uFvjstVnGd1nE0dPAgoYjQUoVrNPGSWW9iQXBYav5TpPQYoWlkWOpVRKwphKpLutxpva5W+5t2xY+bZfSZhlE5y6OnIhd21ohDBRvc+4PmcV4/I8jFvHsf073XxHYQWB31H/xJ4nyDi/ihsx4WiqFjaCMSmawu1uw/lIuVbtdbjY2wDy6p5REr2W/iK9hjSlPIpQuoFifEb9cJCqSJySeZv0UD9cc3M7ZsrZD7zq4zxQAyVlrly2iZIBpllIVSOo9cPuHHaeeKKN
dfisiD+Zj3OBqXmVb82UaV6IowXcJUzQMTFfmMpC27A9T74lygKnzHYiXf4hRnFpIXjDgQxCzHUPG3e3p+uASWCKhhqMxrCDMgIp0Jvo/8rDofLBZxRIKVWjjMemKyOzG4DW3AHc4rmtq1lPIYFkd10qLi4B3PtfE/ioWHxKfKcJDH4bZDFVVZrauVhKW1AkW38wcdEZXnlNSUxikdUbquobBv6rjp7j74qHg6SCny1UjiUSWuT2H2/5xrmVZVU78+mqVOk/Qx6/hhWTIWymHjxccQnTnA3xPWnzOGizDTypHsJAwOk/7f579I0FWskSPGwYEbEdxj5t5Hms2Yyo8EpQBrOL3KsDtjtH4WcRSvk8cFVIZGRF0k+uK/GzENwaQ+VgBXmsuZKk9jj0VvL5uo9ALYhVrBYWNziOr8yMTugbcnHQL8ZzAtyXqczLuSWNhgP4wzpqegk5J/iEeBL2JOIbPON6PK0cSzKWTqt/wGOcviL8YqivzAw0eoxXtZAT9yRiZvIC6lOPx2bftHnE0tZNK71QGrUxBeVYljHfStyTfzO574oHjl4qPM4Ji4EbHSWVgdJ8/XFhfvda6Fnaop0dhc60ZiDireN4pgrSnRMmsE6Lb/briVSGcS8gohk9SZxUrBGoaKRLeEmMEEehxMCRqmkLJIgNt1tsfQjFdZGzUyry5Q1PJuEc7XHWx7HBoQsmXvpkK3XwNqsVbt6YjyYwr6luPIWx7jLl/LAvSXCMd0PiAOEJ4KmpjAlkVvFcAoLYjcrzKSSlcOdMiNvb3xLJVc5L2VTbquGEFDFCnEio5M3oZppRUuhcgFlXYjV3/ADxvJn2ZRzNor5DqUqA52W/Ww/TD+aoHKZAWNx1ABt+eIOZEEgUFFckEEi+3kb9Dhi5GMmbGo1HT5/mtrU+ZyqYyXAY3uPK9sb5fn9bFTaDPO1mNrSCwHWw2xFyzI1lWy+I7EgEnphVNAQCRPEBvY3wXNqg8FuNOEvh/UZ/V1cM1bHl01Kf4kDqTMD2fSbXTsbG462OJ+X4Wz5bnL5dSxjMvmJeVHU6QI47gG5J2B637+WGHDnxBzk5nQ5ZFmFFQUpmHzNTNTqb+bu1ixPYb+QAxdAzWLIVoIcxmiHMk5fhXQWZuhFze58vXHdfP4qDDjyA8nYDX+L6+Zx/HxK3Jl9vvB/JsnyfhLNmkpqVa8RaFjSQhhGwO9gLg3IvfqPTEVx5ltDLllPm8VDGkyl3eo5gjMrO5MrAWLSMSfqJAVRsD1xMcSfFzhGiqa6mehqq6vSGSO1RSaBFLayrZjsL9dsV9x18UpuLCKOgp3yrKxAnMpgbI7kDbSNrBr2IsbY6OfhjGQBwVPSgdH3JPvBLoo0NiP8q+G9HUiGszVnpVqYdZUOFEZ3O3uCp9wR3wtxT8L4skjXMOE655cxjjab5XTzNMZJOot0RQndjvv6YjOFeOAyUuVZ40jUaqscDqNbBiQo1Mx2QAknY9AMHf/VnCFTV1uQtTZnXPFLKsNXTcuWJwBs5V/D6XtYW8sDjXFlHGhxI73yv3FTLRhfvKKyqr0VktTUIWVlLmQWGliO3nc4tH4VcVT1dVPkyy0NJCpvSQuHM07MT4V6rt3vbY998V5xHw9WZPFS1FcUhWvD1KjUuqxa3iVRZSL7AbHe22IWqrIdMLZaklNyxd21WJPv1xHgyN42bkB/5FK7KKudLQcNcM5rBNV5zlVPKGaNUlLxlRyyYwgYuBYEEdQCbAg7YoHjrJ4sm4praHLaLMaeBSGCV0aq9jc7aPDo8iOuHGTcfZpl2QDLFiiqKeKaQojr4THKhWWMgfUGuG67FRbBDxJwZm1TleV1uZNFl8EsCRQrKhEqgAC1u4O7XJ872OHO6+gqKCa7J739z776jD+OfpG5W7RgiRPCoAFiCDbzw2IsLyPta4BHiOJHNcsOS1BpfmIqomCKW6KRpLLq0kHuL4SoYoknY1sHOTRuWawDefXEp+m5NsEgxGgaSSri5
bCLQwYH+m2+LbyLMos6o5aWk5b1TIUmhBJbSe+w327i/UXxV+YTZcrwjLoSIkjtKGa+p/MHz/AEwbfDzhueugfMFEYJIWlMMzBonUhrG2xDgFWBOsXVhtfDcIyZbVCRf/ACJVjPA0NxbPPhxmeXU09VS6ZqeIcx42dVeNbd1JvgQji0WWMBn87YuH4j8P5zXcPGukK5gYq+omu8aq1LSFysSoQAd7Eld76rgXF8VOkQjYIJAxNr+ajywHmYxgcBRQl+P69zWFSswBOuQ2A74POGEaGCon0ljDYj3tt+ZwNU9PHCTILM/0r/f/AD3wYZVMKChZqkXVAGdT/OQNhjhZ2sTo4E4mRvEFFyqaFJrtOVDG+5F9yTiv5I5JM0OgCyBQPbr/AHwf5xNLKvNqZLtJ4msd/wDgdsQWX5eZD80gBZm1FfIdsF47cFJMHyU9RgBMn4jrsgphFGn1DYhsN2h4hqaGjzGoNMkFfzGpg9UoeTQSGst79RYXG/bE5neWRT0YeVdnFiQb28jgAamkpZgJoj18LgEg+xGLfGXE68iNyHy8mfG3HlqGPC2eT5XnUYq45Iv5ZVbYkdsdefB/iiSpEEUj6WY2AJ3sB0+2OLYy8/y60+uSSKPxMbne97A+mOu/2fOHa2oSCZouQDYcyVTYjyFsS58NZQySnBlLYiHnTQreTBdbl2AAxC5xJOIaqpKtpRCdvbBeuVxx8uMDmOerW2GMzLJvm6Gtp1XRrjKhrd7dcPbGzSEMAZw7xfxW1HLU1lZUGUK1gn3/ANv1xR+d8bz5pO3LCwLfY+ntgi+KtdNTZzV0M45ZhlZGCm6kqxF/xvbFUIVFSTURtJD3CvpPvifxPGDWzS/yvK9MBVh/kmYCUamqpGfyLC34YfcQTCqyqS4R7DoU/wAOKx5hpalHy93I6lSemCCozqQUTx1IKh069be+GZPHZXBBgYvJXJjIIqPsjkhr6SWimYwzKebCx3FxsQe9j/tiehkalpmpai2iVDpcHY/7HAFlrtHNE4O4a4N7ehGC/MJWiy68hPS6nzwvOn1RuB/o/KR1GzQPUAGxuCDcH9MSUMhur7hH8uxwN0c4So8d9DrY2xOZdKYpOXKQ8T/Sw74zIvcLE3tJKoaSJOby46kdyBZgPcYiK3MoZYiQo0rbwkeJfY98Tq5TFVSMFqmpd9WpX02979cMM34KepjU0ua0yyMpJaUaAw+3fHsWMN7xWbIVvUGjVq1wQWsbg9cOFzSO3jFj5DDKs4OzyhXUhpauMk2aCpRr/a4OB+WoqaeRop0MciGzKwsRiz+GDdGQfxTL2JM0bpRzsdKz1VuY9xstug273xpm/Elfmzn56aWVVa6RhrIp8x6jDFlaOMRIAFYgykm246DGLopo5QbSGTuOg38/PDlxry5nZnLDGquKVNTJmtSavOKiWSaQqryOdbMALA372AGPZ0jjlMNJKZItdlkcabgdCfLDM6dKqthGr6iLb4d86CSnPLSzK2q7G+1+nvg2Ju4PeolGzsrLH9VrKF6Li6OAcvoc24btlFLI1dBOEq4rcxpPD9Qby6+Hp274plp2U6Y2LR27ixJ98T9JxlnOX5JFlmXZk9LSJHINMQ0XLNdmJG5Y7bnsAMNxMq8g10QRrvfz7Q8bhDc6FoOHf35TNFNHDHUwobNUwxM8SnYsI5CQNrjVY9O2Ky4c+F2UcRRzxU2dkSRGRGQpGV56E3BKn6WABVgTcXGAfiXjut4mzHLZYL5eaCnEKSRSEMT1Zi3Xc3NvXCGS8TS5PmBqMlaSBeXaXxbSDvf0vig5seNAnEvxHZOz9rjWdWayNSxKb4fZnwfNlvEVTlSfu+nQSRRfN8uZZGGzbXsF269z3wcR5PHxfSUVb8xRZlWVWuRqaStL3I8TA6RsFuL3B3NtthgVPxgoqzhmmoa7LY8wqebrqKeUMqOq/QisDfc7/bFU0PFdbkOc1dfwyf3aZQ6CNm5hRGa+m562sN8D+C2P0nFqQLF
7B70dQxkGNrUy3fjbkdDT5PR1pphBmotA0sNmVmUf6bjbTYbqw28+oxRyxPydbKxRG0NJbw3sbC/n6YIzX5txGJjnc9RUzmRX0iMnchVBt2J8IB77YjHo62i1UOZmakjY89ElU2JFwGt62IvhWZ1ZrQUBQ/4+YrIOR5feROrlm7JYLuRax/8AWCrgfis8OZiJKqgWrpZotPMB0SRqpO4PQi5NwQb2FrHEdwtwhmHGOZtQ5QLSXDPLI2lFuwBJP39cOs6yWoyCuFBm0DrVGNXKF7XU/SdvPc22xgL4wHA/3gpyXYnVnDMUnEf73pKavizBYm0SCjqlSWMW2YEq1gb9R+Ixzrxhk9Nl3E1dT5fFJHHA+6TVKVDX6El0AB9rAjvviQ4Kq6Wgelkyesqcvr2IM7RzEMgB8SjYeEjbfETxVEaHPquSBzIs15dTS62a43LeVzfY9rYF/SHhrixLXE9XdA3/AL1OyjNy5N7zSGoWiBkkQuyi58h5ffC8de9W6i7FWsQL9yf8/DDKMCuURC4hcgyN3XbpiXNDFQx3iN3J2Nr6f/e/5Y4r0O+50EsjXU3r5TJGRsUTdj3Jt09gLY84KeCuplhkYcxR9N7fcHDStqUFPOFuI0j2Pc7bnBL8Nfh9V5/BrykCpIANozZ1PtfGBfwzMZvxBHVdk87ppSO8JH1FgbeuBWbIi9QUgD6na1h1OOpuG/2ceJc6p1+fqIcvgNiecSW/AYJ4v2ZY8uUClrBWVbGxlaOyRg9SB3PvheMZkF1HM+F9MZy38Ovhfm/FfFkWWZKjuSCsz6bpEvck9MfSnhDgaj4aySgy2mhT/tkUFgOpA64YfDn4e5L8OsnWHL6cCoYDnTMLvIfU4NWzSKKLVcA47GNKFt3ONlyA2qdR7S5RChDMBcY9qKWAxyDRsRY4Ypm7vuNxhRczjkVkIF/XFQZSKkvEzhX9rf4U/IStxLlVNqpmOmqEaj+H5G3ljjGSikmBYsybeW2PsD8QuH6HivIazLa1dcc8ZUjuPUHHzJ424CrOEeIK2glQskMh0MF+pL7HEjMMPUqVP4gb9pXdDlt33Jc9yB0GCKtyf5rLZpIxay2G2H9FkcjyJyhpU/zYMhl0ceXmnILeEgkixxBl8i2BEtw+MqqR95TeVIXDJKPoO4wW1qF8p5YYsbALvexHa/cYguQcuzaRGuv8TY4no5UnjaG+iRWJVSbA37fjYj74LMbIImeOKUqYOfLSQkNGbrf6T54e08zwWYn+GTuDuMLzU8ms69Ora6qb79satTGnp7uytqJ0r7G2NLX3NCcbqP8A5/VArta4cxna+2BGpq5JmbWxdb/zbj/1hStzCWVBBFFOkSuSSIzdjhkRb6Emt6xHbFWHFw2ZzPIzeoaHUVQ6bX91Hlh6cuGZBZpFZ2A03AJvbDKCIyG3LkF+gMZwZZPT1UFEFjpZmBYm/Lb/AGwxjXUnRbg7keRSZ5HXVhmgMVCmuSJ25bON9lHfy++IOvkhNY8lLEaaJyf4Y30/jhxTyOG5AvGOrINunS/nhGuax5Kr9LG58zglBDbiCw4hQIxjfRqsNX9OJGaNYHZFUgmw0gdD/h/PHtNlbTSQCVTBH1dwLg9T9sZUxVFTM/OZQNdyQ4YIOwuDbGlgT3ANRqCXflpqSykm/bD2fkk8tTKuw8ZUHfysOuF4J6Xnli3Mcm8khXZVv0H5YZTc6WtlejewUkIwIU2vtjOzBqJTQtGjKSS2m/Swt7YUgjkSByqXD6fw/wCf0wusExXmVepkUb38u3640iP/AHHhbwKuxI/O3ljb1PAyYoqRiIZEGvSx1AGxBAuF+9x+GPaTM5aLKamlkpqC9U/gmliu8dtrA22BF7jCGX5ukiVNPCjxgRG7Bt3Hc9OuFJKnLTGkU1VM3KAPLeMkBvsdziWm5UwhqShsSxuDeM8tioKKLNmppcwp4TTLNzCGeINqUNtvpIFvL0xE8XZtw3VDMp0parNM2nsiymUQ01Iqiyh
E+qQgd2IBJO2ACTkxzLJQGd5S2p5HVUA8gFF8MopnFTKrG3MurX9cXHNlfs6rqv1/OEXteMsD4bcUHhuurIo5IlppirytUPo0kD+Ve7EkDbtgn4r4tyTP+G8wo6OsJzPNKyOWqIhFykf0h5D/ACgAKqLt0v3JqZaCqpJkWSP/AOnrUqQdXljww1CuEglEkhvoiiIJ1ew6nA48ziwrWCK+PzHzDTIVHEiTNBWvk0zmFxPEHBkWwUNYGyg72tfBLxjWUlZQ0k2XyQSwyAC9tMkLbFkPYg32NuxwN5Nk1SKXmVcDrDIEcSEXG5ZQT5XKkWwUfuPn5eUVdW2wtcHGMSqkEbPvKsbkH4gvRiRahrX5fVvxwQwykH5Z42blLzJCdhv1Jv1xBR1L5fO8FRb+GbaCtt+1/PG81TUVkLlEM6FyQq7eHbv5g32xzXQsZ1EcKJtmTER1Ka95mGy+IgE9Pf8Azti8/wBm2FIs1eAvFJUkDl8y4VWB6MQb9O2Kmy/Jyqs00eqZU1bi9m6D774L+CKit4fzaKpp5VgAO1+wHXYYMAAVJmbkxM+j/D1BVxyyy1dVDJSMqCKJFtosN++98EvzlKPAoIANt8VXwDxhDnOSU80chYldwW322wSV1cxppGiJVyNj1GKlIURFFjuPuJMyamV2pWD2Xp2OOT/iR+0ZmNJmVRlGRx6ZYm0SGRTdWHUDzxetNUZnU0Uq5mEvuA6mxI87HHz0+Nctfl3xCzsUjySL8wShTsPbAJ+I24b/AIa6l25V+1JxRlo5dUqVA8z5YJOGP2sqmbNIqfPKcRxOQOYLGzE/pjij941aAGV3WTqwbqDjMvzKqqatEQsXZwAAL3OH+mB1JhmJ1PqrlfGdPnekxyglwCApv1wDcb/D6g4mzlJqyG9jZnXZgCOuGfwmy05Tw3lkeZShqhIVJA9r4swVKCYM4FnFr2viWua00rs4zayhsy+BNBQwyTQPJNEASAm1vwxUfEeWR5OJgqU4iS4IeVlcY6H+LnGi8IZRJMt2kkukIQkAtbv5D3xwRxZm2aZ3nM02bVMkxZtQF7IvoB0woYFJ6hHyHHvH1QP3lmUsnKVIwNgGvf1OEJE5cgLkhFsSD2t/xj2hUwROuo30jbrthCod9Cgm6s3uDgTtqEeul33PaGqMlYWa/UsB6/8AGJfK6dVro5aldcKEN9QGre4HtgfjcQSKY7Eqbn2xJPmaU0bNaQIviKKvXBV9WoBakMM/3hlg12oY21dfEOv44QGYUIksaJGXzLA/3xXTcSKSfHKP/iMJniCI3u8p91BxT6bSLmss+LMcu13+TUALpADH+2H8PEcVMnLKVK9wFk2+22Kh/wCokHR3t6xjC8XFrRrpBDAdNUQOM9Nx0IPNfvH+YZIyxHM6SmlbLlblpU6LK7X39geuG+S8FZ/xI2ZT5Dl5rUoKZ6yskDKBBEDuzaiB3Fh1PQXxJyfEqpbhaLKTEvKhj5LEWXWDftbbtgbkz/MMtppocsqZIKXMI1E6L9Mqq11BHezC/vhGE+UwYMADevfUldcXIcTY9/zjyopOTRxUEX+pI5acld7i2x7dDiEqRGlUaVHvEDpNhYXv+fvhegqqp41ijlkLytdvGdh54apTvzJJBG0rtIUiFv5u5PtcYtReN2YkCzFZI4YEaLVqDsNR67joMNtMAcrpN/U3Axvl1O1VVNFsbAlhfrbG1ZFHDM4RyXvZ1Itb1Hpgxo1c9uSlKyVVYx0gUCRaX17EIOuw7k/nbCVevMWWaJYoYyAEUAiy9gDh9kCL+78wo46KmmlrhHEJqgEGIXJ1ISRbtiCLzUcskKyuCvgIB2I329sJX6nIHtMqLZY6CoXQPEwZRbubHHk1AkSKt2kntqa5AAB3F/W3X3xmV1PJrYnpwUkVvC6WBBOHtRlpiq43NRrLESWINmF9x74I6fuejX5ZCgc6YnBF7MCD67Y8nplp21GRC7Hbfp/lxiUocupSbTF6mAy6pFhFyo3Fhbv
vhtXZa0tTK8dPPBSR3IMiaCQB6nrt3/8AREOLIngNXN6SloGyKqnrqqRq5njSiiA2J1eJmPYAbAe+JvIKCWCUVjRpVJEQrTIbcpidiD1Hv64EeeHQsxXl2ssYbsOgwYcA5zBTVYoqyJOdNIBENBZp3c2AZidKKBudrnFGDGS+2qNSj3LQyKvgmjaN4YZ+W4ZlkAKK4JIYj+axJNul8TUOXioqpHp4AYDa5sb6u5388a0s2TZFWsJJflpJAjI6prurXsQvcEruOu4IwX5TQTV1VJPT2RGPhChgB9juMW5ywwccrAtft9paa9oJ13A1LmlnnpIXbSQHC774iKf4crlk5lQysbWRm8QQemL4gypBEOZBd7bkLhvVZcArFFIHqMcdkhqZTMmQmIu0q9V6g7N6Yi5XOTyQT1i3ia3gA6+QA/wb+eLHzyn5EUjEaQGv6HALnTrMAropfSBta1uoA32PXCqhXLQ4F+J1LRPHbTECLNGHFz7+X546ByTiSDM6JJo5BIjC4W++PnyrPRzSVVOWjlBsFPQn1Hpi3OCPicctpk5sphlRFLsx2cn0xoBmhhOqMxroIlcq13I3A7DHJXxp4DHENfNmlDMtPIoO2/iH+XxZZ48mqqfXylm1j61bt3wL5nn89XcJQh1udmPuL4DnxMbxDCjOUhw/X1GYmmjQyPe2s3CnfrfFzfCj4S1UOcR1/ERjMVO11j6rfzJxKwpWUdass2WQhVO2hemDDK+L4aILzEMGx1bWv/bDWz2KEQuEKbMvbK62mgpFvDdR4SV6rhZ8zijktFJrv0xV1HxjREbVa2ZRffYj1wG8cfGGlyRWgyqRZ66RbJvcLbrfAqxMIyK/aNzyp4mraXL8oilPyJYzyJ5kdCv8wt3HTHOnJmp3+XnVopNyqm+x7fY2sfscWvlXED1czTSuX5jFmDG5Vj6424tycZvBDXUMIlrKRSbAfUnW9u5Hl6+mDD/6SIHH/UJVscpnki0qxJtqtsCMSstNzKN9brIVJKKvUH/P1xE5YHaARqoDk7g9rdsPpamDLJnNQ6cw78rzHb74nYEtQlqsAttIeSRYGLcxVK7EOCL+mGeY561TSGlpoVhiP1MpN2Hl6DGmZ14zOZZI1aOICyoxvbzOGBTTfHQTEKBYbnLyZTZCnUae+MwqYxc48Mdhh8nieM2xtpx5oONnpIaRUUTukICqbuwBv5WJ/P8AHDnLcvTMKeeFP9WNdaEvsV7g+WJMQz0lAlJChJ1F5YxfRqIsAfMgX3PmcRdHDTySSPG5ieM+IHZbdP16+2JA9gxdTxgsDRxU51TKB08x39cTtDl8k2UZjVxLHJFGUeSPVdi2oA7DtpJ3xF5DTPWZnBDF4+Y/L3WwYnp9vPD2qH7pzTNoKB2kpl1AKN0uW03NutiSAcLYm+IO/wDueTWzI+uZMnzOpfLomeGOdkhkfup3AtYX8JHXGAPX+J6FZJQtzyAwbqBuBe/UYmaSh/eeXKWV2Xotze0iDb8VNvtiMmj05hNFSAorI4BBvY/UB/8A5GMXIG0exPGriFJC1cTSU4lb5hty4uyW/Uf741pcqkiHNrAIyR4Vbr+HngphpTNnNDmckcdPRUa0ytBFtsE3F7WPiFzfcg4jMyRMxqaifkTjLqV0jM0y8vQWubkepBt12tjVy311NKkDUYZdLT0dY0rxLMgQqkTAr4r9fP8A5xs+YiU2dUWOTcaVuoI26H2wybMVa5SNVX+UsLk4yeWOJWBieOeNwWRhbSD/AIMM4m7MDdR+a+qjhVaVpYgw+tjb7LbYYXGfZkhjR66KWPdpFAVrqBcggjcWHfucRvMSaBkjUrKUJVgbXxGwowinYXuUAFx5sP8AbHlxg9iFQk1mtEKSqVJW0RSeKKyhV3/v0xvlmTtJVK3PSSzXPYj0OGkdRU19NFRursYFeV9TXLL9XfyF/wAcF+TUFNWT08tRI0ck0aBgADuo03b3sDc9b4IWomqplycEcPR18UEtfKl
gAqknVsBt+A264vDhzhtKOl1xAyixYd29h/tirOCaSmoaRaahZpFRdK3W+/f88XbkFYlTFCkZ5aMS8hB2VFO+48zt7XwIYXLB1JjKqSCpmSnVCtQYBO8bqQUUmw1eRuCLeh8sTicMwPcTHXfcC22G3C4DXrJlMdTmzGdAw6RIAI1+yENbzc4OI4o441ubsRho+oXPAyvsz+GOX5rGQ0KKT1JF8AOffs6pmClKSvjhkLE3eG/bobdR3/vjohYRYHpjcU4fT4fGdul8AVuMBnGOefsw8WwUrnKq/La17EojM0ZHoCQfT/fFQ8Q8G8S8J/w+JcqloVmawkB1AkDzW4x9Oo6EOANCkqLb/niD4m4NyzPaCamrqKGeKQeJWS4+3374HhPanzWpOIMxy6kiipaluWCSV87HBBQfEKoEPLqYkdiNmtY9wcEXxj+Ddd8Ocw+boC1Xw/NJpilI8UBJJCP+gbv74qGrqYKe5Y6SviAv1uf/AFgeFwOREsSs+IkdNHTl0Mjuo1gHpvt+mBfNePedIGgjTRcgC/UeeAKfMXldg58RuVX+kedu2GjVS38XhA2G2PDAs96pk/VcS5hU8xmlaIOCAUHTEJqYsWd3Zw2rUd8e1dSirHHqFkjXbyv4j+uGfzYjcbgjzwxVFagFoW5HXGKULexbp74NKLM2TljVsQSfXFYZdWjX2FsGLZhEYqE8sQ3pwupe5BIJPqdr4Uy0RGK8LqSponq+ZFQU7VDHxStCL38ztcnErmfDOTZvTqaykinnYeGSwDA+wttivYs2kE4CX1J9LX3wT5PmtQtSj1R+kXsTjL49Q9NJ3JvhPwqYAtZla627CZz+pw+zL9n3hmqpXNGk+XykXVxKXX7g9sTeVV8UzozbKwve+C6gzCLMSkSozwRnc/1n19P19up+oZnpj7TnyX9m7PkpppMsqqGq135buWXw+gtsT5nt088VVnHD9bwzU1FHnFMaeuBKaG7Du336D7474nlkp1UUlpXkGy/0juxHkP8A1gT4syfhjiPKZaTiDkS7HQzgGdG/qBHiuTv5YIZANExTYx7ThlIucQFXUxNgB3Plh1yqKm8FQjzyD6ikwVQfIbG/vg/zfg6HKFlCSip5bNFHJflkx36tsSDuV2xCDKTb+BJQhO1oyfzKk40ZA/UTxMRWpqedSM6m1SjtBo3DnXpb8wfyxFTqgrJCkPIeZmdoybgDe9z5Wufvg3paWPLKNIHUu1AgeGQbbMPH03JuL/hgXo4HkmquZAUeWPxqyf6MeoaRb1IufQDzxEuUEtXQiysn+Hctgy2eN45W5xjEES6fqdyzM48gEAG/cnDJ8tApnEDLHzHEvIlkvLIm5Um3Ym7W2tcYditNJMjq55gdWF9vFa1revf74bxU8sq1ldEhKUsgildhcXZWZRfy/XEasxYuT3X7/WbftUe0WWVVHl3PiljvAqvHHf6piwvYdyRrHsoxHcU00VLn+YyjQJWqC8axpqAUtck+pLfbE3lNZT0sEEtU0o5r+M2BJN7bfiR9zjJ6J80zvI45lEazyrDOgA3B/mvba46+2MXIy5bPRB/f6frCH8shsvXTLUkoXSWYEkttZQybj8MP86zKbMJqKlm0RZfFQtBGkaD6dFjq82+mx6jSMOVrknllpVXUsRZYGX6YmW+wPmdye1ziOqcomeGolhsAUJCH+c3uRYd9hv5Y0Nb22oOwKEGcmyT5hq0tMqmlTx37ox0Ej/8AK/2w7GTQlSJpA8pDRSsFJ+k7EeewG+N6N5Y+ZNLpUSqIplk2IvffpfridysVWbZvTxrCJ6daf5Z1WTaMgncDtivJmcEmYADqCP7lmhpJM0pH5tNHVclEt412BJYdhuov03xJ8PUcdVnqU9YiwxUcySSurFlazi6A+p2Hlvg1zOAFKihy8PeprDHTwqttRBjMkt/K0dtzbfEXm1BBR1lTDBB8pTSuAwVW1am3Zwx6+3rgU8lsqfMZxrYgnRQ8yuhMkXJkaKp
iqFTY6xq7edmUfbBHwtQSyVSyjUdYBNttiMP85pGNRFItOweOZQzWA1SGQ80jzDDQ33OHXDsAj5REckoVF1NYqRceW/44oTMG3PVRljcPVD0zaZiSrbEarsDvcj12/MYtPhLOY54P3VA38avqzCG86cXMjA+dtQ9CwxVyBKajSaEKJ7gJbfmKVIK2Pfp+uN+Fc2ENQMwjM38BmKBWAeO56LfysL9+uOa2cLkBH5Q7nXIqlizDJipt4pwFA2tyjt+QwRUVck0tr6iCQ3pY9Mc2T/FDMZGy96IrJPG8gZXshdSLHSN97bj/AGwdcD/EKkzNZ1RpUqElYtSuul1Gq2osdmufLHSTyUZqEMES9hKEiDqoexANz0F9yfbD6FNEms9+mA/Ls6kMsSlokD/yfUxHqen5YIIq0QOsLXZSp5XqB/L9v0xVY7hQhj6Gwxq4Gr+JutvET2GIOfiWkp6mii56K9Vuo1f0glv7Yk4qhKvY3RbizPsCfbrj3IGaIyz3g3K+JstqKLNYIqmnnj0vG4uCDjiD4zfsn55w7WVGacFpJmWStduTs0tMd77dWX1G+O95kgiURwyM7rYMBsBhJqeQrcSBbnzxoInitz49T5S+WVbU+ZI8btuQwKn0N/fGR5Oama8ahxDZjHe/MH9xb7239cfVPi/4QcLcf0zw8RZXT1EpuVnVAkqE9w43xyd8Tf2SuIcgllquCZBm1Gjao4GOmdFvf2a3pv6Yw2BYi6qcw5hlNJLUED+GJkVob7grpG1/MdO/TDROHY5IwIYrVCG1m8Sub7WB2Ht3xa78PuJ4qDO6KahmDcuOSpjaFRf/AMSLncXtYX388I1GRyZaoWRKaojY6Y5YLENcgXDX2t5G1r4h9cqtHR+Z6gRKtrMrkpauRKcEEtZYlNio8z/thzWUuZR5PEwictAsbDYjZrhgPOx0H74sA5QaymqRVRFKn6YGiQnx3UAn/wDId7/nh1X0c+WRwUqFaimhY81wPqQDQFB876yT22wtvKUATOMqzLqyqSQLUxyqQbXCkgj0O4wXUmZPBOgk2JuulhpO3n+OJaGgppal4a6jIVTpSVAHAktcM3c9vL8sO4OGqSnqIlkjZjLcLZdXLN76yb23PfyHrj2TyEA5QhYEIIc1pKem5aNKZIjyyAwuz+Snpbt+OCXI8zqqCserqlRZbmKIIA4UhRsBfT0Nrm57AYC0y2nEXOSapDvKUQJCfEbE6t9iDY9+xw9NXJRROsRKIl1c8hjrY3FtJtpboPIi2OefIJNmGOR2Ye1FfFUzSSy1s0RkVbvVFkE1xYHSvgXcEC5/PfDkZhDQUU/ycdOEmTcrHaRmAv8AVfv0ucV4lZUyyt84GSjmjaaN5CbmMGwG24G2xPcemEc5zmasjgmjjEsZ7wKbkWINztvsb9bYMZSD8zbjavyk1iiFbVCmJCpU7kFb73279fQdcRf/AExQKSJpYxIPqAHQ/fEhHWVdJHyqWROWqoC8imQqpuRa246AX6Y3k4RmzcJXmlWtNSuvXTwB1G5FjqNwdunrhwz3u6nlUt/KLg+MlzKaspXosrnp6WrXQjsmlQTcjSD9I7j0wxz/AIdrslq54pFjhnmQPUTmoVjJa2wAJIvbE09fKuQ1lUizq0VXJGp5jNrNwtgD5Db7YYcR0MkOXx5hWmCGNSlMXkBDmTRrew72Jt/xiHC5Z6J+PzP7MzgNwffLIaqMSTpK8YkvzlOmzAGxF9iL7W7DE/8Au1n4aighj5UE8pnnlTud1Uj7H88RmQZXW8YZxHT0Ec1YinxDoFQW3/zzxaL8OU8OUzVDV4OYQvyZKPQOUynbl7eSi9vM4LyXGPiCep4Y72JVVRk1UYQFheKClQa5GNkjW5Iv31E2wvlxlaphqRMY9E0cjvb6yCb2+9tsFlTwlmSsKKspqqEy2cRousvHYm4UdSoP23xlCj5jmVNk9PSGCGGVY6Zzp1Pbrq9ASSSemB9XkNwPSrUF6Vpsxr3ZKSO
JKaN+UFjA5Sk/UT3JJN2a5N8N4IeRUygQ+JCHLhSXYEXAB9Cb2xalIuX5RPBSZNVJWR08jJWVkwULUSdCVW3QXsL998CCs8U8s1BFGVjLXMviuBtb79DjF8hMjsB0IRQfeRAy6PNz8tJTGeo8Ca1UhiHJ0+4vf2vgmThGlynN+ZQ5ikEdBCPAgLfPzvcOVPYLcbnbb1xIZZLLSQVGZmKNJ/BBTMtv4Tb3YeekE2HmcMmyzMFlhp85jlpI6iBXhDNpsBfRYdbXte474E5SxIQ66nqUdCQdSJJZatFmenhS8OtBcu4Ooj2vt+GHNHQZlUxsmb1NTOUkVRDpud7eYsPffD2mpUjmjoaiWOllmlI8bEsXsdz/AJ3xM0tbFV5MsDLIJxWu8gQ2ugQDqe5N/wAcYuT0koTyiRE2SfOV8FJTH5pIFMShTZnOrdRbuT0tvt5Y0i4ezGGctV060yK76YpgxNw9iTpO3Qge2JqmrpuFad8xptD1cqnlS2voU7Fh31C1gT74ypq6OBqNCJpZZ7LKUlLhh1AFrE9dybXvgUzMjfSL/e4QQGJVOXaMleSvpkhgjm061Zhp9/I39e+IekrLs9cJCBK+sEbB2Nz06Xtf8sEdFBJPR1YqCtRQSl4mjqSbuL37bXG3TuTfGlXleYvAI6GKKKAHwMyhNLbatK7A38I87C3QYU2YE1cZ6SsCRGNTLV5hT04SFQ8swkRkUtoAtp3/AA74d5BR1+VcQzVc1dJT0usgM9wpF9Qsb7sSBt5A4jKsV9NHetrH5crIWCLfcf026eWJLimWKuiybJcpgano8upRIZ2BlZ53sXkA6bXVQT0tthuF3Dd1FKt3csnKPjNJl1RK+Y86osjKgSDlqTqG4a9unbB8fi7S1KU7RySBgrTqrjQ1wuwBO2/vjm3NaaGhpkFRVaklcrZrlrC3QACxJufLriMNROmk2YqlyoUllO91P/3bdsWfxuUip467nRdb8RYv+rMiqqh1YJBWDlr9MGqO67eY39b4v/hKvqM7ghzKrVoo2UGCnYjUgP8AO/8A5ny/lHrfHz0jz98onXNZEWeKWdFZCN0jDguF7WsAMd5fC3PqXP8AIojlDwi6hwuoXIPTYYu8bIWJZupi7uWlTJBOrc5tKi1/M4kqWDL5hZ5CW6DUbYFnqRTqEaxlLWtfpfuThnnbTjLQYZhE8Uga/Zh/xi4sRsRoUNqEmZRx0stqWQsAPfEWuaxzLyqsWLXCv6+RxlJOjwQr4uxBO9x74bVtLA9PJGfAxOpST3wQY1YnqHRgN8R8gouJsqnoc0gilDKeTI6Asjdt8ct51wPTwcPxVrzkyxc1K/kR2AlQ+HbuCNj3uL747DzPk1dANbqNQKG53Djtjk74gcRQcL8S5nTzLHJk+eRGCsibqkw25g9QbH2xz/MQOoYdzVAB+qV/liyZTmMlVTzsxpFdwRcFlKGxvvtvsfNcQ5pKrMZictmgeBUfUXl0HSuonb1PX1OE1euy+orMonkCSLOqxv1uo36dwbdB54m6jMKXLq6VuZGVEoDpTIqsRe4QXHiv39fTHLChf3+/vN4Kx3oRLKcrrKbK5Mxr6eEUYqOSYFNpXlZSQBb+XbcjYb480SPK6LC6tqKusYAC2Nupsdr/AEnb9ceTZppWRrtHEZmlanZlsj2AUXPQ29euJKlqoP3FTyZdQTvU1U3N54RmZgD9NztYEfnibIxF/JqDQGhIekp+fkwjnDlZ5hqbUUKAE9N9jex8t7d9pOjkEU0kNXUTSUnMeN9D3WDUSLA+Qv5bb43aFMwzCdp0hEQIenpYruEa++oAEAb+vltbEzRZFFSU9HIuURZZDJOxmWdgjEE2Lbk9idtx5WwRyqAdw61owSqxPSzSZb+8JIXlh/iMb6Y4grAKvoQL7dz6Y0ocqkeOqaf+LRhQIJYZizFdNxc/rtc7dcEGYSwQ5j8zCqVqRFhDfYGxuAOlwLm19t8DiZj8r84s8bzQtLoVF8JUk7H
b1t1vhi5OY/OKJ1ub5ZzZamZlRkQwXgWWLSXIvqIFrg2DWHQ4VeVXctROTCbWsCtj3H1DC9DmMlHYTwgSRUspXnG5BYaVHck7nGsELZgrzxypGhYhVvptb0GNOQ9rMHKqUwgyrhT5fhxamsHOqZXqKhYzZi7uT1HYbqB7Yn6r4ZUkWS5VScXpHXGeKSplKklllLam027WFjjKd52gizCSBWSJ2cjfpawAGFOJM3rMnpqSmqJpHqIKIsjC/hVtyCepNz0x8kvk5+RYaN/ruVpxC2RG+TcB01TRR/I8SHJMoijEMc2gxMi672J6nWNX4XwU5dwpwrDSUdOnEgy+oSVpIYoaZJlle9/ET4vc27YA+G4q/Nqalr86Somy9WKllawMSjqR/nfFl5Fw5wjlnzmYzZ7U/Ps6yxCONTHbSbRnvptfYWxS2XI2Uoxuv6/5hKB9qEGsxo3OX8xnpp9VK0S1aFmKgEixHXp4rd9sCyVVHmdNrpIY0zSZuTUmyoV8NuYoHmLXtg7+VmyyCOnyiSDMDLO7J4NPhKE6WVvW2BrKaSpolzCqz6jo6aokj0RaogGFtwBbtjEyn0mLH8oLX7yHybgOCrZ8ry9poswgmEkk8Sq8UY7atRHucNOG/hnmXEccs9dmVPDBzGjREGp5LHrc2Ci+998FlFTf9M0T11VqqayvGtoCwW1rncX74hs0g4hqsyhkrlZKVo1cQUzeEI38h09PfDcXkvR47+5P9oHFANiP8+p6H4d5DFRZclKM2VedBXq4laPS1ix1bG5PYDFXCXN85RqrMJJ66sqTy2ma+y3ve/QE3Fh5A4OqqjoJ80mnqKQVJWMKgncWjIJJ8Ppt164Tr81y/MZpIjrZABGYXumr1su3UYrxZyo4qLJ2TAZS2hGua5NBmNLlWfVcQNTSQR0uZR06aQZUI0yepKgAkdTiQz+dOH6mtjyeApGJOc3hAL6twL9gAd/XDqmElDQ1B5I59VOgRQQLIfpBviL4odaeCngkliaSZwwLi6NHewF8e5M7izqeKMLgvm0lfmNFFSvqVVZqglUIF26W89u2MDylYaOkm5PzQ1zyFbWUCwt+H44XqqrNZZBTx/xam4ZUDBEsvTr0HTfEjR5bmk1HT1VTBBzJpN9EwZlUA9B33xY1KBcEKe/eDVfmlRTPFl1LJaOlbQgBsLE3LW7knfB2jR1mXUbiZVZBdUMtyttmv79sMMp4co5XqazNzXUlDLeOl5oUPI3crYDa/wDfHrvw9SF6GiyqZ6OCUtNMJW3YD+ZupOJ/IplKqN+8duqkfmNFlA0tVZnOjqNIGknmWPi0gdD64msg4aOZZfFJltbUCKna7iRLlkN7Am/QeeIWegqsudYYqeb5GqQvC918R6nf2ODfgGpi+QqcwrLKkcZLQ9OZqGnRbyAN7emFZc/DCrTFVe6kHxjldJz0yiimjjqZCJ2Ldb/0j0tcbYEsxzFIIqbJ6CNqSkTVGum7O5JuXv5E3xYVbkEmV1Gb1ahKvMKaU0lC0ig8xWXUSPKy/ngSy166CB5HCSrIOVBeK7JqJGkdyb32xR4+UuOJ9v6wSDyK9SCoIPmXaiLR7AaAEDBl7i3T1xY3wp4jzDgLM9UKyR5Xp5bMx0G99hp7Dytgcl4Sk4TGX1FSsd6tGmpY2Ymyf1E2FhfoMRuUUdRmKVFRmdTLCslQDPJqJ0xqb7eV72vipctN31ACFTTdzsHKfi9kme5mKNKuIKjAzIjX+nfY+eJ+Hi+j4kq/3UivPqWQq0Q2bbwEH1J+2OPctoaaizPmZZDG+XzORA7XB5gFrOfX88S/DPHlVwzmMVbS1TfvakqSHRt45IyALW9xis+YbqNQ8D9Qnd75O2U5fTRR+JkhVWBNze2K/wCMc4zClg0Q5fUVR1i5iW+nfe/lgX4Z+PK8Z5vlVPmdM+Q18yuqFZeZHI4W42IvvY7YMOLfinluT8N11PKIo80K2jjjN+ZY3LDvsATY46ePKmRCQdRZYg3
K0zfP80onzETUk0sZPMWEGzggdTfobjFC/ECop+KP+4mppIqxAeZA+m5262NxjbPvjNmmZ8RZ1mWT1aihgVpY9dw0gvY29cCNf8QafiCGkety/XM5PNmkusiN5hh9Vu6nEvqAqR3U0nkLMc8L1Jg4lhqaipjmERKmEoBzG03PuQo7d8RmXLHmdfJDIzwATu0JnJtG5B0bHtfr5bYQoc5FRVVEaxRQpCBpqUh2Jbpde+HcDvR1QFa1PBA6iTU5Jsv/AIDf1G2OYUJJqYhBbczNIJZqtaWWop2lE7FwEsWPToB/l8Eaxx02XJlU+Yy12yycqCP/APjPY+PX0I6AgeeG1VlzU+YrDlEK1uZwohfXGV5rdRubqTY2I/HBPwZlMFQKkcTmThlw0kmkRgtI17AdbAb/AJYRk6HI/v8ArGrhHLuRJCUZpC6/K0VPUqsyQlVeRLX/ANTr59P6sFtHNS5hRmmXhv5ZGjdY6moqJZWLBbqLkgbkgdO+J9Mky+OnjrVv+6Av8NaikA5jWsNIUnYkX33v6YlKXIJsxp6isi5Jn06VVpdK6AQQLdb9N8crL5AApfaN9JhdQAreGhUU8Pzaq9BLrM0ity5EIG1xazi426e+AfM4K+Bpar5W+WxMCJyPFcbeIdh6jFomerOZ5hleZxvRoQqs0iWZjc7ggeEXO2G7w0FNVvQz1BzZ4hr5ESFF6bXc/V5Ww/DlyX9fUz0wRd1BaLh6mz7KzmdcZaqSKFdIQAc0qSNOnqdRt+Hrh5lfwyz3Nad3ossSSKGQwghiouvUAel7fbDpKmleCppqyOmVA6vy73UXNyDa3fBbPxbmEKwDKo3NO0QYBQoCnfbc4audgeI/6nsaIRvuDuQU1VPkeWZipesiWNedGn/1G+/QdMNs3zuRJKw5sYnqlVZVppfpUntfvfsMZjMcbEoNj/8AR/rNbSmNqfid86yaaCMPl8rqYoafT4bjrYeWGfDeXRnNYKWrV4wq8xWWU6mdbHceXXGYzBvSF1UV3/eK3QMIL1UuY0tZmJUxvVvbSdJTey/liTg+ezQV8TvDmlXEB8pDsoS17WY9T/tjMZiTEeQAPz/WGPqYA/vUZT8JZXDlYzriynkNaCqwrHMwMjG+zdh1wnxZmElFkyVGWTLQwpGIwIRcB7bX8xfGYzFD5GV8fya/WMYAKTBFcozSvgFdPJCalHASRTcTNYEhl9cOHkpkYGqy+qyyVZeZzob6S+5swtYX3xmMx1Vclqia4NqHeTimf5aj4lyDLqo16FmrpNTlR0Sw6AgdcDX7oqqvjGVaChSuy6BwiQyr/DAA6b9jjMZiA5W9Pl7xjfWgv7zfijhlaXLubDSRR17FkanhkusMZJNlJ/vjzgjhBjPFNl9WaymXS0qVAs6eXuL4zGYNcznAT8xNU0nuKuDnqqiPMc+zKmENECsMaMRoDdBftYfrgAzemgeHTkb1FdBTyjm06OCCtt2tYXH54zGY6XjjkpuVAAEmN6iup0y6GVowk1LZZFJIV1Gwtf07YmMgy6aeCqhgpngiiKVABa/MLHcX8wMZjMS+UePjMR7f5ElstowuzrKqWjqZq9YJ5GRkVVEhsZmILMQfww4g4eo63N+QJUpap5A2keLl2BJYKereuMxmJcBLGyZ0sONWAJ+3+YU51wcnEmVUlbn0vPkaFYKZYxoLqgIG3bpitM0ymnyujWnpagQaWK1aOba7HZScZjMMGRzkIvowcwBe4NVL0op4aCCSSIJL8wUiFgOguL+mFpqahyzPMxiy+inzCVYw8U5ewt2A9d8ZjMdGzo/EkChluMJ69oqmKeeqenrBIJUSEm6ODYW9cS2dZpnfEkbpV0Eysw0tPpAYkdx733xmMxev/wAxBCgxrnXC+T5XFllJPG8VdJTlJDGngJI/m62wCVPCkMM0EAkkid59KaASBrPU3xmMwjA7EA32P8zc+NQdSczz5HJ8zqMtSLRBAgHq3QE/554lcoypaeLLXzNZGpJ
QJIJta2sbkAAjqdtsZjMLZ2UKfv8A4iF25uK09WtJVVJy6okp6oOCzvclr9w98L0WcTVeZQTZvUc1I1IZFjuJOw1eZxmMxh+oG44MSKhZBnFVmdQrSCOFmAQoq6AI1H1Eeew3w/TPaSmq0aprJ68Q2VRGdIsOl/Q4zGYkPj4yLjj/ACXGPEnEnMeesmkkFRUOrTKF8N/5bdyMC1Jn9PT1RhqYFkmLMTpYgE2uLnzxmMw1MCVJ8h3ca0WWVudZhM9yqz7qWSyrvsD5YfVuWvFVzIKtp9LWJjnsAfLGYzE4as5HxPJjVhZn/9k=",
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": null,
+ "metadata": {
+ "image/jpeg": {
+ "width": 200
+ }
+ },
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "fn = Path('samples/puppy.jpg')\n",
+ "Image(filename=fn, width=200)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "f355ecbe",
+ "id": "60bb0ee7",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠\n",
- "\n",
- "I'll find the derivative of f(x) = x³ + 2x² - 5x + 1 using the power rule.\n",
- "\n",
- "The power rule states that for any term ax^n, the derivative is nax^(n-1).\n",
- "\n",
- "Taking the derivative of each term:\n",
- "\n",
- "**Term 1:** x³\n",
- "- Derivative: 3x^(3-1) = 3x²\n",
- "\n",
- "**Term 2:** 2x² \n",
- "- Derivative: 2 × 2x^(2-1) = 4x\n",
- "\n",
- "**Term 3:** -5x\n",
- "- Derivative: -5 × 1x^(1-1) = -5\n",
- "\n",
- "**Term 4:** 1 (constant)\n",
- "- Derivative: 0\n",
- "\n",
- "**Final Answer:**\n",
- "f'(x) = 3x² + 4x - 5"
+ "{\n",
+ " \"role\": \"user\",\n",
+ " \"content\": [\n",
+ " {\n",
+ " \"type\": \"text\",\n",
+ " \"text\": \"hey what in this image?\"\n",
+ " },\n",
+ " {\n",
+ " \"type\": \"image_url\",\n",
+ " \"image_url\": \"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4gxUSU...\n"
]
}
],
"source": [
- "msg = mk_msg(\"Solve this complex math problem: What is the derivative of x^3 + 2x^2 - 5x + 1?\")\n",
- "r = completion(messages=[msg], model=model, stream=True, reasoning_effort=\"low\")\n",
- "r2 = SaveReturn(stream_with_complete(r))\n",
- "for o in r2: print(delta_text(o) or '', end='')\n"
+ "msg = mk_msg(['hey what in this image?',fn.read_bytes()])\n",
+ "print(json.dumps(msg,indent=1)[:200]+\"...\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ba0bee2d",
+ "metadata": {},
+ "source": [
+ "Which can be passed to litellm's `completion` function like this:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "30ff5f1a",
+ "id": "6fc53ae1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "model = ms[1]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b9ebb192",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "I'll find the derivative of f(x) = x³ + 2x² - 5x + 1 using the power rule.\n",
- "\n",
- "The power rule states that for any term ax^n, the derivative is nax^(n-1).\n",
- "\n",
- "Taking the derivative of each term:\n",
- "\n",
- "**Term 1:** x³\n",
- "- Derivative: 3x^(3-1) = 3x²\n",
- "\n",
- "**Term 2:** 2x² \n",
- "- Derivative: 2 × 2x^(2-1) = 4x\n",
- "\n",
- "**Term 3:** -5x\n",
- "- Derivative: -5 × 1x^(1-1) = -5\n",
- "\n",
- "**Term 4:** 1 (constant)\n",
- "- Derivative: 0\n",
- "\n",
- "**Final Answer:**\n",
- "f'(x) = 3x² + 4x - 5\n",
+ "This image shows an adorable puppy with distinctive brown and white markings on its face. The puppy appears to be a small breed, possibly a Cavalier King Charles Spaniel or similar breed, with fluffy reddish-brown and white fur. The puppy is positioned near some purple flowers (which look like small daisies or asters) and is sitting on grass. The setting appears to be outdoors in a garden area, creating a sweet, natural portrait of this very cute young dog. The puppy has dark eyes and the classic innocent, gentle expression that makes puppies so endearing.\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=433, prompt_tokens=66, total_tokens=499, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=205, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
+ "- usage: `Usage(completion_tokens=131, prompt_tokens=104, total_tokens=235, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"I'll find the derivative of f(x) = x³ + 2x² - 5x + 1 using the power rule.\\n\\nThe power rule states that for any term ax^n, the derivative is nax^(n-1).\\n\\nTaking the derivative of each term:\\n\\n**Term 1:** x³\\n- Derivative: 3x^(3-1) = 3x²\\n\\n**Term 2:** 2x² \\n- Derivative: 2 × 2x^(2-1) = 4x\\n\\n**Term 3:** -5x\\n- Derivative: -5 × 1x^(1-1) = -5\\n\\n**Term 4:** 1 (constant)\\n- Derivative: 0\\n\\n**Final Answer:**\\nf'(x) = 3x² + 4x - 5\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None, thinking_blocks=[{'type': 'thinking', 'thinking': \"I need to find the derivative of the function f(x) = x^3 + 2x^2 - 5x + 1.\\n\\nI'll use the power rule for derivatives, which states that if f(x) = x^n, then f'(x) = nx^(n-1).\\n\\nLet me take the derivative of each term:\\n\\n1) For x^3: The derivative is 3x^(3-1) = 3x^2\\n2) For 2x^2: The derivative is 2 · 2x^(2-1) = 4x\\n3) For -5x: The derivative is -5 · 1x^(1-1) = -5x^0 = -5\\n4) For the constant 1: The derivative of any constant is 0\\n\\nSo the derivative is: 3x^2 + 4x - 5 + 0 = 3x^2 + 4x - 5\", 'signature': 
'Eo4FCkYIBxgCKkDYpmV8bmbO2hMcNNjoiEju7Bq203xL5XgVRaQou4xDja7JHmRpeAKH01+uDSdMIjY6saSHwVhm9B0THnyqoBFtEgw6ZIwvUa51SCFUym8aDMi+gs0t/d1JU//qmCIwF/fycNXbo1EiddKqAmFQvsAiNcBXQ9mg8WDoXLgnSnl/upY4QNJQxklIwNqWhPloKvUDr2W6ppA3QGQlDnnUCdyDiJD04GCmNHDtdbwLBPKqVyecg5nE9APb7K/VJi2TensSBWJKN7JiZyREOTqYqIe7Ev+dPFJKn6dJb0E7Zbbq2t4FSsk/TflMFk151YRZ5oggl6Oa3wo7Lfhboe0JQbpTXfAK5ZHS0ckFrildgn8TA7Eg2Z+iuMNVn/fZaNgLIRNNTCEb0O0Vh/O/rhz52OTZm7Ro9bjeF3kxF9wYopQTFrZhljrlGgvQPWPuL0LCw6D0zKvU5ZXb9pfGT47cryJ1mwG9HUoi5q3/XE105tXq93VrjRJoWSyyg/hcZ41Vti8Gppv0yP7d0ylgc3lQe/qJPI78Y6Am62g3kgB9HKM7hGpi3Xd2ybL9NKXvMEBBUnV4IDMmjTFwwfTSl6UXrOkqoOTrQrlKxO5S/IkEkY/S6sEOEuEe6aBbiR80fM2LHPzD9SjJ/cvUeS7e5rY1cjtz6OPmpzCeJOATtgmGEUg82NW6Xv+GHONbTfmk7PGBdgTwQnByyFdbdjZbCygw6uix6kXBBKWEa/56qpWDoMms9c06iaJUhxC7HwrXKYs5+PtUMCAMmmdtnNTop5UJb688yZPb52w2M2bcGBeiKuUdYXqCf+d2A0qYreLGJYbf8PfI6LasV68DMqMOms8ukXT59PXWlyQ3GAE='}], reasoning_content=\"I need to find the derivative of the function f(x) = x^3 + 2x^2 - 5x + 1.\\n\\nI'll use the power rule for derivatives, which states that if f(x) = x^n, then f'(x) = nx^(n-1).\\n\\nLet me take the derivative of each term:\\n\\n1) For x^3: The derivative is 3x^(3-1) = 3x^2\\n2) For 2x^2: The derivative is 2 · 2x^(2-1) = 4x\\n3) For -5x: The derivative is -5 · 1x^(1-1) = -5x^0 = -5\\n4) For the constant 1: The derivative of any constant is 0\\n\\nSo the derivative is: 3x^2 + 4x - 5 + 0 = 3x^2 + 4x - 5\"))], usage=Usage(completion_tokens=433, prompt_tokens=66, total_tokens=499, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=205, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='This image shows an adorable puppy with distinctive brown and white markings on its face. The puppy appears to be a small breed, possibly a Cavalier King Charles Spaniel or similar breed, with fluffy reddish-brown and white fur. The puppy is positioned near some purple flowers (which look like small daisies or asters) and is sitting on grass. The setting appears to be outdoors in a garden area, creating a sweet, natural portrait of this very cute young dog. The puppy has dark eyes and the classic innocent, gentle expression that makes puppies so endearing.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=131, prompt_tokens=104, total_tokens=235, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -1267,104 +1497,99 @@
}
],
"source": [
- "r2.value"
+ "completion(model,[msg])"
]
},
{
"cell_type": "markdown",
- "id": "d92cdb20",
+ "id": "90426580",
"metadata": {},
"source": [
- "## Search"
+ "Now lets make it easy to provide entire conversations:"
]
},
{
- "cell_type": "markdown",
- "id": "92a7128c",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "75fa16cc",
"metadata": {},
+ "outputs": [],
"source": [
- "LiteLLM provides search, not via tools, but via the special `web_search_options` param."
+ "#| export\n",
+ "def mk_msgs(msgs, # List of messages (each: str, bytes, list, or dict w 'role' and 'content' fields)\n",
+ " cache=False, # Enable Anthropic caching\n",
+ " ttl=None, # Cache TTL: '5m' (default) or '1h'\n",
+ " cache_last_ckpt_only=True # Only cache the last message\n",
+ " ):\n",
+ " \"Create a list of LiteLLM compatible messages.\"\n",
+ " if not msgs: return []\n",
+ " if not isinstance(msgs, list): msgs = [msgs]\n",
+ " res,role = [],'user'\n",
+ " for m in msgs:\n",
+ " res.append(msg:=mk_msg(m, role=role,cache=cache))\n",
+ " role = 'assistant' if msg['role'] in ('user','function', 'tool') else 'user'\n",
+ " if cache_last_ckpt_only: res = [_remove_cache_ckpts(m) for m in res]\n",
+ " if res and cache: res[-1] = _add_cache_control(res[-1], cache=cache, ttl=ttl)\n",
+ " return res"
]
},
{
"cell_type": "markdown",
- "id": "e76423a3",
- "metadata": {},
- "source": [
- "**Note:** Not all models support web search. LiteLLM's `supports_web_search` field should indicate this, but it's unreliable for some models like `claude-sonnet-4-20250514`. Checking both `supports_web_search` and `search_context_cost_per_query` provides more accurate detection."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6530af43",
+ "id": "6b4624e2",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "def _has_search(m):\n",
- " i = get_model_info(m)\n",
- " return bool(i['search_context_cost_per_query'] or i['supports_web_search'])"
+ "With `mk_msgs` you can easily provide a whole conversation:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "43417017",
+ "id": "263ecc52",
"metadata": {},
"outputs": [
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "gemini/gemini-2.5-flash True\n",
- "claude-sonnet-4-20250514 True\n",
- "openai/gpt-4.1 False\n"
- ]
+ "data": {
+ "text/plain": [
+ "[{'role': 'user', 'content': 'Hey!'},\n",
+ " {'role': 'assistant', 'content': 'Hi there!'},\n",
+ " {'role': 'user', 'content': 'How are you?'},\n",
+ " {'role': 'assistant', 'content': \"I'm doing fine and you?\"}]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
- "for m in ms: print(m, _has_search(m))"
+ "msgs = mk_msgs(['Hey!',\"Hi there!\",\"How are you?\",\"I'm doing fine and you?\"])\n",
+ "msgs"
]
},
{
"cell_type": "markdown",
- "id": "ff8611e2",
+ "id": "910677d4",
"metadata": {},
"source": [
- "When search is supported it can be used like this:"
 "Who's speaking and when is automatically inferred.\n",
+ "Even when there are multiple tools being called in parallel (which LiteLLM supports!)."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "89e0bead",
+ "id": "9cf8d1f2",
"metadata": {},
"outputs": [
{
"data": {
- "text/markdown": [
- "Otters are fascinating semiaquatic mammals that belong to the weasel family. The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. There are 13 species in total, ranging from the small-clawed otter to the giant otter.\n",
- "\n",
- "Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. Otters have the densest fur of any animal—as many as a million hairs per square inch in places. Webbed feet and powerful tails, which act like rudders, make otters strong swimmers.\n",
- "\n",
- "All otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters have a unique feeding behavior: A sea otter will float on its back, place a rock on its chest, then smash the mollusk down on it until it breaks open. River otters, however, can hold their breath for up to 8 minutes.\n",
- "\n",
- "River otters are especially playful, gamboling on land and splashing into rivers and streams. Otters live up to 16 years; they are by nature playful, and frolic in the water with their pups. When resting, sea otters entangle themselves in kelp so they don't float away.\n",
- "\n",
- "Many otter species face conservation challenges. Otters and their mustelid relatives were once hunted extensively for their fur, many to the point of near extinction. Despite regulations designed to protect them, many species remain at risk from pollution and habitat loss.\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=603, prompt_tokens=13314, total_tokens=13917, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), server_tool_use=ServerToolUse(web_search_requests=1), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
- "\n",
- " "
- ],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Otters are fascinating semiaquatic mammals that belong to the weasel family. The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. There are 13 species in total, ranging from the small-clawed otter to the giant otter.\\n\\nMost are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. Otters have the densest fur of any animal—as many as a million hairs per square inch in places. Webbed feet and powerful tails, which act like rudders, make otters strong swimmers.\\n\\nAll otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters have a unique feeding behavior: A sea otter will float on its back, place a rock on its chest, then smash the mollusk down on it until it breaks open. River otters, however, can hold their breath for up to 8 minutes.\\n\\nRiver otters are especially playful, gamboling on land and splashing into rivers and streams. Otters live up to 16 years; they are by nature playful, and frolic in the water with their pups. When resting, sea otters entangle themselves in kelp so they don't float away.\\n\\nMany otter species face conservation challenges. Otters and their mustelid relatives were once hunted extensively for their fur, many to the point of near extinction. Despite regulations designed to protect them, many species remain at risk from pollution and habitat loss.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': [[{'type': 'web_search_result_location', 'cited_text': 'The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDLIIDjwxPgKn3T1dcBoMOiqvShGJLxd8+SdzIjCz3SlTIqO2HA5gvks2pAQGmz3XWB+xFAaljcWlyygSy/kTY7sqeqMn1qU4tGpBmf4qE4abQbfNHbLywouCbZ9quTv0iwgYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'There are 13 species in total, ranging from the small-clawed otter to the giant otter.', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDCXseI4G8zexyxzXlRoMpFgWRLWJhF6OCzWzIjC4h57A4xvrpWQFrK5X4R5FCgSieqLTHAhYMDpZKSfmktNjvZEhPRsh65kdvMm1/TMqE8nJZb8r81E83mBk45fzBfkGEF4YBA=='}, {'type': 'web_search_result_location', 'cited_text': 'There are 14 known species of otters, ranging in size and habitat preferences, with some species adapted to cold waters requiring a high metabolic rat...', 'url': 'https://en.wikipedia.org/wiki/Otter', 'title': 'Otter - Wikipedia', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDMBGz/TkkP7vJ/5bBBoM6OKET14S7EBuRDq5IjCdnxv7okJjoIxqiaU0kmfBEJ3V8tboSz+XFgFEGPxIyKThi6pIEgjWFQWJpwIvWMEqE24LwoVusqslEH/NExIYNSa+GdQYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDNH8Zg5oGTTIuNgWbhoM203UStaxnwT6ihm3IjCJO09/UFQsgEsQDC6HMT95Uvq5eX9qnL7+rTsmYXFAUUCDUdofHzjcOxwNHLCwXjsqEyGDArRktHJMZWjqQ75J2rJzQIUYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'Otters have the densest fur of any animal—as many as a million hairs per square inch in places. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDOM+ZAdwxG6vExmODhoMpZpQzqs8haGSfQzzIjBuCmrn97nFh1fkEDH+x5wSdfuyWZdCH4C0IOMr4IRxpMCjxixd9bdsqvkWH5BlI90qE84aNPZNvlF9U3B2yi0OFSTIjUsYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'Webbed feet and powerful tails, which act like rudders, make otters strong swimmers. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDB4SDVlqNhefbSSuzxoMeG4s1rBdtpIgK0DSIjBX3WipgCcchJ4jFkLFj4/COw6rDv4fyRw4Xe4GmR3Dpo4TCUrgwHoJmO6l8MK4kTMqFOEpHWwGbHkFVUd+Aq1NOx2lpNPRGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'All otters are expert hunters that eat fish, crustaceans, and other critters. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDBEyDl3t5gAz0iW2JxoMLzj6yl7IqCr/rkQFIjDc7DkfXM7M8ELyQrl7PgcEJwHGQ2IbW6kDSpbVdQR9q6pqQiFAonIGapSO6bOs8lcqFNxFIb6J9DOBWrepkBD9oLphplfWGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'A sea otter will float on its back, place a rock on its chest, then smash the mollusk down on it until it breaks open. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDA0ilE2w5h7YdRjMzhoMqwIS1XuE51cgr+7wIjC8Jm4aMVIY12LLXkgVX2aDuWfaz4V2YH9VxtHDFglIlgA6zNp5gE+1+M4bj2YkdLoqFKCvef+BMeVSAFwlkJtEYJYVwGvYGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'River otters, however, can hold their breath for up to 8 minutes. ', 'url': 'https://www.doi.gov/blog/12-facts-about-otters-sea-otter-awareness-week', 'title': '12 Facts About Otters for Sea Otter Awareness Week | U.S. Department of the Interior', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDBRRB8XLgZXQcvTOzxoM7ICPdtJHrGPgTjBgIjAwJzEqHlU34vJgq2xCl91sP8PUJojcyUmr3RjFmjSOCl6DSGhxaIIFMZuf2LIydG4qFOSehmf1sOfabXVUU/3zEtuYUF15GAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'River otters are especially playful, gamboling on land and splashing into rivers and streams. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDB57wjzqdgmYw53MIxoMbnctVN4aNFUYI2QyIjCeO6ekN1PB/st0ohcHxwVnl7Hm2uKHdJsB6qhKHD9zcFdBHWi4bXmkITh1prJiP+EqFEDV9W/kth6i6gPvo5fFJ91A8ZgCGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'Otters live up to 16 years; they are by nature playful, and frolic in the water with their pups. ', 'url': 'https://en.wikipedia.org/wiki/Otter', 'title': 'Otter - Wikipedia', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDKANsJtkm8sPve7PxRoMBV2hF6Bv9+BRCaLdIjCbmBPNZLWyg/fl9fIqE2yzUgazsFZOzLxn1SsM+vzPaUIFNuDMDQQ2kyfRUJt3thcqFI5qd+bcbUuz7G8sVhT8+5Npf2oJGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'When it’s time to nap, sea otters entangle themselves in kelp so they don’t float away. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDLGvrNLsLI08xlvgTBoM9IV7m9dO6UW4WiPQIjA+J2H71NbOFXTC9c/BSXoc/k5LWai1PWOl/RTN6DvcX7+aoNa5X8vUj0aA4b9m+VkqFPEjP3FFBjUYjR3a8FST57xIkOM5GAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'Otters and their mustelid relatives were once hunted extensively for their fur, many to the point of near extinction. 
Despite regulations designed to ...', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpMBCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDGpQatyaoWKjXpMGZxoM4lzcl/OByeZqtkXVIjB1sXm55ThwnOma8DGFbuaHxzNH5TuEDVReHHdd+RSoQ4qVRlu/xPhPpK5EX5Z7iuQqF1a45PWr6Is2p49kQjUlwAmSPJJTSYlmGAQ='}]], 'thinking_blocks': None}))], usage=Usage(completion_tokens=603, prompt_tokens=13314, total_tokens=13917, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), server_tool_use=ServerToolUse(web_search_requests=1), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "[{'role': 'user', 'content': 'Tell me the weather in Paris and Rome'},\n",
+ " {'role': 'assistant', 'content': 'Assistant calls weather tool two times'},\n",
+ " {'role': 'tool', 'content': 'Weather in Paris is ...'},\n",
+ " {'role': 'tool', 'content': 'Weather in Rome is ...'},\n",
+ " {'role': 'assistant', 'content': 'Assistant returns weather'},\n",
+ " {'role': 'user', 'content': 'Thanks!'}]"
]
},
"execution_count": null,
@@ -1373,43 +1598,92 @@
}
],
"source": [
- "smsg = mk_msg(\"Search the web and tell me very briefly about otters\")\n",
- "r = completion(model, [smsg], web_search_options={\"search_context_size\": \"low\"}) # or 'medium' / 'high'\n",
- "r"
+ "msgs = mk_msgs(['Tell me the weather in Paris and Rome',\n",
+ " 'Assistant calls weather tool two times',\n",
+ " {'role':'tool','content':'Weather in Paris is ...'},\n",
+ " {'role':'tool','content':'Weather in Rome is ...'},\n",
+ " 'Assistant returns weather',\n",
+ " 'Thanks!'])\n",
+ "msgs"
]
},
{
- "cell_type": "markdown",
- "id": "e1af925d",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d6f3bf25",
"metadata": {},
+ "outputs": [],
"source": [
- "## Citations"
+ "#| hide\n",
+ "test_eq([m['role'] for m in msgs],['user','assistant','tool','tool','assistant','user'])"
]
},
{
"cell_type": "markdown",
- "id": "abaeeee1",
+ "id": "475862e9",
"metadata": {},
"source": [
- "Next, lets handle Anthropic's search citations.\n",
- "\n",
- "When not using streaming, all citations are placed in a separate key in the response:"
+ "For ease of use, if `msgs` is not already in a `list`, it will automatically be wrapped inside one. This way you can pass a single prompt into `mk_msgs` and get back a LiteLLM compatible msg history."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "aa4e7663",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[{'role': 'user', 'content': 'Hey'}]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "msgs = mk_msgs(\"Hey\")\n",
+ "msgs"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bea8c2b0",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[{'role': 'tool', 'content': 'fake tool result'}]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "#| hide\n",
+ "msgs = mk_msgs({'role':'tool','content':'fake tool result'})\n",
+ "msgs"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "cf84f8c8",
+ "id": "2c828056",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "[{'type': 'web_search_result_location',\n",
- " 'cited_text': 'The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. ',\n",
- " 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1',\n",
- " 'title': 'Otters, facts and information | National Geographic',\n",
- " 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDLIIDjwxPgKn3T1dcBoMOiqvShGJLxd8+SdzIjCz3SlTIqO2HA5gvks2pAQGmz3XWB+xFAaljcWlyygSy/kTY7sqeqMn1qU4tGpBmf4qE4abQbfNHbLywouCbZ9quTv0iwgYBA=='}]"
+ "[{'role': 'user', 'content': 'Hey!'},\n",
+ " {'role': 'assistant', 'content': 'Hi there!'},\n",
+ " {'role': 'user', 'content': 'How are you?'},\n",
+ " {'role': 'assistant', 'content': \"I'm fine, you?\"}]"
]
},
"execution_count": null,
@@ -1418,308 +1692,299 @@
}
],
"source": [
- "r.choices[0].message.provider_specific_fields['citations'][0]"
+ "msgs = mk_msgs(['Hey!',\"Hi there!\",\"How are you?\",\"I'm fine, you?\"])\n",
+ "msgs"
]
},
{
"cell_type": "markdown",
- "id": "4f2a8559",
- "metadata": {},
- "source": [
- "However, when streaming the results are not captured this way.\n",
- "Instead, we provide this helper function that adds the citation to the `content` field in markdown format:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "fc341e7e",
+ "id": "284fbfc3",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "def cite_footnote(msg):\n",
- " if not (delta:=nested_idx(msg, 'choices', 0, 'delta')): return\n",
- " if citation:= nested_idx(delta, 'provider_specific_fields', 'citation'):\n",
- " title = citation['title'].replace('\"', '\\\\\"')\n",
- " delta.content = f'[*]({citation[\"url\"]} \"{title}\") '\n",
- " \n",
- "def cite_footnotes(stream_list):\n",
- " \"Add markdown footnote citations to stream deltas\"\n",
- " for msg in stream_list: cite_footnote(msg)"
 "However, beware that if you use `mk_msgs` for a single message consisting of multiple parts,\n",
 "then you should be explicit and make sure to wrap those multiple parts in two lists:\n",
+ "\n",
+ "1. One list to show that they belong together in one message (the inner list).\n",
 "2. Another, because `mk_msgs` expects a list of multiple messages (the outer list).\n",
+ "\n",
+ "This is common when working with images for example:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "c2150365",
+ "id": "d315d964",
"metadata": {},
"outputs": [
{
- "data": {
- "text/markdown": [
- "Otters are [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") members of the weasel family, found on every continent except Australia and Antarctica. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") There are 13-14 species in total, ranging from [*](https://greatergood.org/blog/9-facts-about-the-small-clawed-otter \"9 Facts About Small-Clawed Otters And how you can save them\") the small-clawed otter (the smallest species) to [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") the giant otter and sea otter (the largest).\n",
- "\n",
- "[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Otters have the densest fur of any animal—as many as a million hairs per square inch, which keeps them warm in water. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") They have webbed feet and powerful tails that act like rudders, making them strong swimmers.\n",
- "\n",
- "[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") All otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters are famous for [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") floating on their backs and using rocks to smash open shellfish. [*](https://www.doi.gov/blog/12-facts-about-otters-sea-otter-awareness-week \"12 Facts About Otters for Sea Otter Awareness Week | U.S. Department of the Interior\") Sea otters can stay submerged for more than 5 minutes, while river otters can hold their breath for up to 8 minutes.\n",
- "\n",
- "[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") They are playful animals, engaging in activities like sliding and splashing. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Sea otters even entangle themselves in kelp while sleeping and sometimes hold hands with other otters to stay together.\n",
- "\n",
- "Many otter species face conservation challenges due to [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") historical hunting for their fur and current threats from pollution and habitat loss.\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=576, prompt_tokens=13314, total_tokens=13890, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
- "\n",
- " "
- ],
- "text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Otters are [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") members of the weasel family, found on every continent except Australia and Antarctica. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") There are 13-14 species in total, ranging from [*](https://greatergood.org/blog/9-facts-about-the-small-clawed-otter \"9 Facts About Small-Clawed Otters And how you can save them\") the small-clawed otter (the smallest species) to [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") the giant otter and sea otter (the largest).\\n\\n[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Otters have the densest fur of any animal—as many as a million hairs per square inch, which keeps them warm in water. 
[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") They have webbed feet and powerful tails that act like rudders, making them strong swimmers.\\n\\n[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") All otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters are famous for [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") floating on their backs and using rocks to smash open shellfish. [*](https://www.doi.gov/blog/12-facts-about-otters-sea-otter-awareness-week \"12 Facts About Otters for Sea Otter Awareness Week | U.S. Department of the Interior\") Sea otters can stay submerged for more than 5 minutes, while river otters can hold their breath for up to 8 minutes.\\n\\n[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") They are playful animals, engaging in activities like sliding and splashing. 
[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Sea otters even entangle themselves in kelp while sleeping and sometimes hold hands with other otters to stay together.\\n\\nMany otter species face conservation challenges due to [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") historical hunting for their fur and current threats from pollution and habitat loss.', role='assistant', tool_calls=[], function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=576, prompt_tokens=13314, total_tokens=13890, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[\n",
+ " {\n",
+ " \"role\": \"user\",\n",
+ " \"content\": [\n",
+ " {\n",
+ " \"type\": \"text\",\n",
+ " \"text\": \"Whats in this img?\"\n",
+ " },\n",
+ " {\n",
+ " \"type\": \"image_url\",\n",
+ " \"image_url\": \"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD...\n"
+ ]
}
],
"source": [
- "r = list(completion(model, [smsg], stream=True, web_search_options={\"search_context_size\": \"low\"}))\n",
- "cite_footnotes(r)\n",
- "stream_chunk_builder(r)"
+ "msgs = mk_msgs([['Whats in this img?',fn.read_bytes()]])\n",
+ "print(json.dumps(msgs,indent=1)[:200]+\"...\")"
]
},
{
"cell_type": "markdown",
- "id": "29018310",
+ "id": "bad470e4",
"metadata": {},
"source": [
- "# Chat"
+ "## Streaming"
]
},
{
"cell_type": "markdown",
- "id": "da9223dc",
+ "id": "eec3dc5d",
"metadata": {},
"source": [
- "LiteLLM is pretty bare bones. It doesnt keep track of conversation history or what tools have been added in the conversation so far.\n",
+    "LiteLLM supports streaming responses. That's really useful if you want to show intermediate results instead of having to wait until the whole response is finished.\n",
"\n",
- "So lets make a Claudette style wrapper so we can do streaming, toolcalling, and toolloops without problems."
+    "We create a helper function that yields each chunk as it arrives and returns the complete built-up response at the end of the stream. This is useful when you want to store the whole response somewhere after displaying the intermediate results."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "a636d732",
+ "id": "9ad6fc2c",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "effort = AttrDict({o[0]:o for o in ('low','medium','high')})"
+ "def stream_with_complete(gen, postproc=noop):\n",
+ " \"Extend streaming response chunks with the complete response\"\n",
+ " chunks = []\n",
+ " for chunk in gen:\n",
+ " chunks.append(chunk)\n",
+ " yield chunk\n",
+ " postproc(chunks)\n",
+ " return stream_chunk_builder(chunks)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "715e9a83",
+ "id": "46f16571",
"metadata": {},
"outputs": [],
"source": [
- "#| export\n",
- "def _mk_prefill(pf): return ModelResponseStream([StreamingChoices(delta=Delta(content=pf,role='assistant'))])"
+ "r = completion(model, mk_msgs(\"Hey!\"), stream=True)\n",
+ "r2 = SaveReturn(stream_with_complete(r))"
]
},
{
- "cell_type": "markdown",
- "id": "987fba85",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9797136b",
"metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Hello! How are you doing today? Is there anything I can help you with?"
+ ]
+ }
+ ],
"source": [
- "When the tool uses are about to be exhausted it is important to alert the AI so that it knows to use its final steps for communicating the user current progress and next steps"
+ "for o in r2:\n",
+ " cts = o.choices[0].delta.content\n",
+ " if cts: print(cts, end='')"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "e18e226c",
+ "id": "897ec073",
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/markdown": [
+ "Hello! How are you doing today? Is there anything I can help you with?\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=20, prompt_tokens=9, total_tokens=29, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
+ "\n",
+ " "
+ ],
+ "text/plain": [
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How are you doing today? Is there anything I can help you with?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=20, prompt_tokens=9, total_tokens=29, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "#| export\n",
- "_final_prompt = \"You have no more tool uses. Please summarize your findings. If you did not complete your goal please tell the user what further work needs to be done so they can choose how best to proceed.\""
+ "r2.value"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "bd8e11c0",
+ "metadata": {},
+ "source": [
+ "## Tools"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "266f3d5d",
+ "id": "4301402e",
"metadata": {},
"outputs": [],
- "source": [
- "#| export\n",
- "class Chat:\n",
- " def __init__(\n",
- " self,\n",
- " model:str, # LiteLLM compatible model name \n",
- " sp='', # System prompt\n",
- " temp=0, # Temperature\n",
- " search=False, # Search (l,m,h), if model supports it\n",
- " tools:list=None, # Add tools\n",
- " hist:list=None, # Chat history\n",
- " ns:Optional[dict]=None, # Custom namespace for tool calling \n",
- " cache=False # Anthropic prompt caching\n",
- " ):\n",
- " \"LiteLLM chat client.\"\n",
- " self.model = model\n",
- " hist,tools = mk_msgs(hist),listify(tools)\n",
- " if ns is None and tools: ns = mk_ns(tools)\n",
- " elif ns is None: ns = globals()\n",
- " self.tool_schemas = [lite_mk_func(t) for t in tools] if tools else None\n",
- " store_attr()\n",
- " \n",
- " def _prep_msg(self, msg=None, prefill=None):\n",
- " \"Prepare the messages list for the API call\"\n",
- " sp = [{\"role\": \"system\", \"content\": self.sp}] if self.sp else []\n",
- " if msg: self.hist = mk_msgs(self.hist+[msg], cache=self.cache)\n",
- " pf = [{\"role\":\"assistant\",\"content\":prefill}] if prefill else []\n",
- " return sp + self.hist + pf\n",
- "\n",
- " def _call(self, msg=None, prefill=None, temp=None, think=None, search=None, stream=False, max_steps=2, step=1, final_prompt=None, tool_choice=None, **kwargs):\n",
- " \"Internal method that always yields responses\"\n",
- " if step>max_steps: return\n",
- " if not get_model_info(self.model)[\"supports_assistant_prefill\"]: prefill=None\n",
- " if _has_search(self.model) and (s:=ifnone(search,self.search)): kwargs['web_search_options'] = {\"search_context_size\": effort[s]}\n",
- " else: _=kwargs.pop('web_search_options',None)\n",
- " res = completion(model=self.model, messages=self._prep_msg(msg, prefill), stream=stream, \n",
- " tools=self.tool_schemas, reasoning_effort = effort.get(think), tool_choice=tool_choice,\n",
- " # temperature is not supported when reasoning\n",
- " temperature=None if think else ifnone(temp,self.temp),\n",
- " **kwargs)\n",
- " if stream:\n",
- " if prefill: yield _mk_prefill(prefill)\n",
- " res = yield from stream_with_complete(res,postproc=cite_footnotes)\n",
- " m = res.choices[0].message\n",
- " if prefill: m.content = prefill + m.content\n",
- " self.hist.append(m)\n",
- " yield res\n",
- "\n",
- " if tcs := m.tool_calls:\n",
- " tool_results=[_lite_call_func(tc, ns=self.ns) for tc in tcs]\n",
- " self.hist+=tool_results\n",
- " for r in tool_results: yield r\n",
- " if step>=max_steps-1: prompt,tool_choice,search = final_prompt,'none',False\n",
- " else: prompt = None\n",
- " yield from self._call(\n",
- " prompt, prefill, temp, think, search, stream, max_steps, step+1,\n",
- " final_prompt, tool_choice, **kwargs)\n",
- " \n",
- " def __call__(self,\n",
- " msg=None, # Message str, or list of multiple message parts\n",
- " prefill=None, # Prefill AI response if model supports it\n",
- " temp=None, # Override temp set on chat initialization\n",
- " think=None, # Thinking (l,m,h)\n",
- " search=None, # Override search set on chat initialization (l,m,h)\n",
- " stream=False, # Stream results\n",
- " max_steps=2, # Maximum number of tool calls\n",
- " final_prompt=_final_prompt, # Final prompt when tool calls have ran out \n",
- " return_all=False, # Returns all intermediate ModelResponses if not streaming and has tool calls\n",
- " **kwargs):\n",
- " \"Main call method - handles streaming vs non-streaming\"\n",
- " result_gen = self._call(msg, prefill, temp, think, search, stream, max_steps, 1, final_prompt, **kwargs) \n",
- " if stream: return result_gen # streaming\n",
- " elif return_all: return list(result_gen) # toolloop behavior\n",
- " else: return last(result_gen) # normal chat behavior"
+ "source": [
+ "#| export\n",
+ "def lite_mk_func(f):\n",
+ " if isinstance(f, dict): return f\n",
+ " return {'type':'function', 'function':get_schema(f, pname='parameters')}"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "69419bb7",
+ "id": "7b103600",
"metadata": {},
"outputs": [],
"source": [
- "@patch(as_prop=True)\n",
- "def cost(self: Chat):\n",
- " \"Total cost of all responses in conversation history\"\n",
- " return sum(getattr(r, '_hidden_params', {}).get('response_cost') or 0\n",
- " for r in self.h if hasattr(r, 'choices'))"
+ "def simple_add(\n",
+ " a: int, # first operand\n",
+ " b: int=0 # second operand\n",
+ ") -> int:\n",
+ " \"Add two numbers together\"\n",
+ " return a + b"
]
},
{
- "cell_type": "markdown",
- "id": "ce163563",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "100fc27b",
"metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'type': 'function',\n",
+ " 'function': {'name': 'simple_add',\n",
+ " 'description': 'Add two numbers together\\n\\nReturns:\\n- type: integer',\n",
+ " 'parameters': {'type': 'object',\n",
+ " 'properties': {'a': {'type': 'integer', 'description': 'first operand'},\n",
+ " 'b': {'type': 'integer', 'description': 'second operand', 'default': 0}},\n",
+ " 'required': ['a']}}}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "## Examples"
+ "toolsc = lite_mk_func(simple_add)\n",
+ "toolsc"
]
},
{
- "cell_type": "markdown",
- "id": "1dbf319a",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8c2af29a",
"metadata": {},
+ "outputs": [],
"source": [
- "### History tracking"
+ "tmsg = mk_msg(\"What is 5478954793+547982745? How about 5479749754+9875438979? Always use tools for calculations, and describe what you'll do before using a tool. Where multiple tool calls are required, do them in a single response where possible.\")\n",
+ "r = completion(model, [tmsg], tools=[toolsc])"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "957ccd1e",
+ "id": "8bdb1817",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "Hi Rens! Nice to meet you. How are you doing today?\n",
+ "I'll help you calculate both of those addition problems using the simple_add tool. Let me perform both calculations for you:\n",
+ "\n",
+ "🔧 simple_add({\"a\": 5478954793, \"b\": 547982745})\n",
+ "\n",
+ "\n",
+ "\n",
+ "🔧 simple_add({\"a\": 5479749754, \"b\": 9875438979})\n",
+ "\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=19, prompt_tokens=14, total_tokens=33, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "- finish_reason: `tool_calls`\n",
+ "- usage: `Usage(completion_tokens=160, prompt_tokens=475, total_tokens=635, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hi Rens! Nice to meet you. How are you doing today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=19, prompt_tokens=14, total_tokens=33, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll help you calculate both of those addition problems using the simple_add tool. Let me perform both calculations for you:\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=1, function=Function(arguments='{\"a\": 5478954793, \"b\": 547982745}', name='simple_add'), id='toolu_013MZMqJL4fBRGTsMtAGJjMk', type='function'), ChatCompletionMessageToolCall(index=2, function=Function(arguments='{\"a\": 5479749754, \"b\": 9875438979}', name='simple_add'), id='toolu_01HkbM4zwAb38n4rH7SNvi75', type='function')], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=160, prompt_tokens=475, total_tokens=635, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
- "execution_count": null,
"metadata": {},
- "output_type": "execute_result"
+ "output_type": "display_data"
}
],
"source": [
- "chat = Chat(model)\n",
- "res = chat(\"Hey my name is Rens\")\n",
- "res"
+ "display(r)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "5978da56",
+ "id": "c4d81d05",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def _lite_call_func(tc,ns,raise_on_err=True):\n",
+ " try: fargs = json.loads(tc.function.arguments)\n",
+ " except Exception as e: raise ValueError(f\"Failed to parse function arguments: {tc.function.arguments}\") from e\n",
+ " res = call_func(tc.function.name, fargs,ns=ns)\n",
+ " return {\"tool_call_id\": tc.id, \"role\": \"tool\", \"name\": tc.function.name, \"content\": str(res)}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d5af607c",
"metadata": {},
"outputs": [
{
"data": {
- "text/markdown": [
- "Your name is Rens! You introduced yourself to me at the start of our conversation.\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=22, prompt_tokens=41, total_tokens=63, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
- "\n",
- " "
- ],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Your name is Rens! You introduced yourself to me at the start of our conversation.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=22, prompt_tokens=41, total_tokens=63, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "[{'tool_call_id': 'toolu_013MZMqJL4fBRGTsMtAGJjMk',\n",
+ " 'role': 'tool',\n",
+ " 'name': 'simple_add',\n",
+ " 'content': '6026937538'},\n",
+ " {'tool_call_id': 'toolu_01HkbM4zwAb38n4rH7SNvi75',\n",
+ " 'role': 'tool',\n",
+ " 'name': 'simple_add',\n",
+ " 'content': '15355188733'}]"
]
},
"execution_count": null,
@@ -1728,32 +1993,89 @@
}
],
"source": [
- "chat(\"Whats my name\")"
+ "tcs = [_lite_call_func(o, ns=globals()) for o in r.choices[0].message.tool_calls]\n",
+ "tcs"
]
},
{
- "cell_type": "markdown",
- "id": "8dbb77c7",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3dd9871b",
"metadata": {},
+ "outputs": [],
"source": [
- "See now we keep track of history!\n",
- "\n",
- "History is stored in the `hist` attribute:"
+ "def delta_text(msg):\n",
+ " \"Extract printable content from streaming delta, return None if nothing to print\"\n",
+ " c = msg.choices[0]\n",
+ " if not c: return c\n",
+ " if not hasattr(c,'delta'): return None #f'{c}'\n",
+ " delta = c.delta\n",
+ " if delta.content: return delta.content\n",
+ " if delta.tool_calls:\n",
+ " res = ''.join(f\"🔧 {tc.function.name}\" for tc in delta.tool_calls if tc.id and tc.function.name)\n",
+ " if res: return f'\\n{res}\\n'\n",
+ " if hasattr(delta,'reasoning_content'): return '🧠' if delta.reasoning_content else '\\n\\n'\n",
+ " return None"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "4a3c29d8",
+ "id": "e4790a51",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "I'll help you calculate both of those sums using the addition tool. Let me perform both calculations for you:\n",
+ "\n",
+ "1. First, I'll calculate 5478954793 + 547982745\n",
+ "2. Then, I'll calculate 5479749754 + 9875438979\n",
+ "🔧 simple_add\n",
+ "\n",
+ "🔧 simple_add\n"
+ ]
+ }
+ ],
+ "source": [
+ "r = completion(messages=[tmsg], model=model, stream=True, tools=[toolsc])\n",
+ "r2 = SaveReturn(stream_with_complete(r))\n",
+ "for o in r2: print(delta_text(o) or '', end='')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5b21118c",
"metadata": {},
"outputs": [
{
"data": {
+ "text/markdown": [
+ "I'll help you calculate both of those sums using the addition tool. Let me perform both calculations for you:\n",
+ "\n",
+ "1. First, I'll calculate 5478954793 + 547982745\n",
+ "2. Then, I'll calculate 5479749754 + 9875438979\n",
+ "\n",
+ "🔧 simple_add({\"a\": 5478954793, \"b\": 547982745})\n",
+ "\n",
+ "\n",
+ "\n",
+ "🔧 simple_add({\"a\": 5479749754, \"b\": 9875438979})\n",
+ "\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `tool_calls`\n",
+ "- usage: `Usage(completion_tokens=197, prompt_tokens=475, total_tokens=672, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
+ "\n",
+ " "
+ ],
"text/plain": [
- "[{'role': 'user', 'content': 'Hey my name is Rens'},\n",
- " Message(content='Hi Rens! Nice to meet you. How are you doing today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}),\n",
- " {'role': 'user', 'content': 'Whats my name'},\n",
- " Message(content='Your name is Rens! You introduced yourself to me at the start of our conversation.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None})]"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=\"I'll help you calculate both of those sums using the addition tool. Let me perform both calculations for you:\\n\\n1. First, I'll calculate 5478954793 + 547982745\\n2. Then, I'll calculate 5479749754 + 9875438979\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"a\": 5478954793, \"b\": 547982745}', name='simple_add'), id='toolu_018E853fnQvQRskwHNSsPs8w', type='function'), ChatCompletionMessageToolCall(function=Function(arguments='{\"a\": 5479749754, \"b\": 9875438979}', name='simple_add'), id='toolu_01W3NFMfY5mSDdDVtgicRe4Q', type='function')], function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=197, prompt_tokens=475, total_tokens=672, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
]
},
"execution_count": null,
@@ -1762,39 +2084,92 @@
}
],
"source": [
- "chat.hist"
+ "r2.value"
]
},
{
- "cell_type": "markdown",
- "id": "8f38015b",
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f355ecbe",
"metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠🧠\n",
+ "\n",
+ "I'll find the derivative of f(x) = x³ + 2x² - 5x + 1 using the power rule.\n",
+ "\n",
+ "The power rule states that for any term ax^n, the derivative is nax^(n-1).\n",
+ "\n",
+ "Taking the derivative of each term:\n",
+ "\n",
+ "**Term 1:** x³\n",
+ "- Derivative: 3x^(3-1) = 3x²\n",
+ "\n",
+ "**Term 2:** 2x² \n",
+ "- Derivative: 2 × 2x^(2-1) = 4x\n",
+ "\n",
+ "**Term 3:** -5x\n",
+ "- Derivative: -5 × 1x^(1-1) = -5\n",
+ "\n",
+ "**Term 4:** 1 (constant)\n",
+ "- Derivative: 0\n",
+ "\n",
+ "**Final Answer:**\n",
+ "f'(x) = 3x² + 4x - 5"
+ ]
+ }
+ ],
"source": [
- "You can also pass an old chat history into new Chat objects:"
+ "msg = mk_msg(\"Solve this complex math problem: What is the derivative of x^3 + 2x^2 - 5x + 1?\")\n",
+ "r = completion(messages=[msg], model=model, stream=True, reasoning_effort=\"low\")\n",
+ "r2 = SaveReturn(stream_with_complete(r))\n",
+ "for o in r2: print(delta_text(o) or '', end='')\n"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "d9f575f3",
+ "id": "30ff5f1a",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "Your name is Rens - you told me that when you first said hello.\n",
+ "I'll find the derivative of f(x) = x³ + 2x² - 5x + 1 using the power rule.\n",
+ "\n",
+ "The power rule states that for any term ax^n, the derivative is nax^(n-1).\n",
+ "\n",
+ "Taking the derivative of each term:\n",
+ "\n",
+ "**Term 1:** x³\n",
+ "- Derivative: 3x^(3-1) = 3x²\n",
+ "\n",
+ "**Term 2:** 2x² \n",
+ "- Derivative: 2 × 2x^(2-1) = 4x\n",
+ "\n",
+ "**Term 3:** -5x\n",
+ "- Derivative: -5 × 1x^(1-1) = -5\n",
+ "\n",
+ "**Term 4:** 1 (constant)\n",
+ "- Derivative: 0\n",
+ "\n",
+ "**Final Answer:**\n",
+ "f'(x) = 3x² + 4x - 5\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=20, prompt_tokens=72, total_tokens=92, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "- usage: `Usage(completion_tokens=433, prompt_tokens=66, total_tokens=499, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=205, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Your name is Rens - you told me that when you first said hello.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=20, prompt_tokens=72, total_tokens=92, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"I'll find the derivative of f(x) = x³ + 2x² - 5x + 1 using the power rule.\\n\\nThe power rule states that for any term ax^n, the derivative is nax^(n-1).\\n\\nTaking the derivative of each term:\\n\\n**Term 1:** x³\\n- Derivative: 3x^(3-1) = 3x²\\n\\n**Term 2:** 2x² \\n- Derivative: 2 × 2x^(2-1) = 4x\\n\\n**Term 3:** -5x\\n- Derivative: -5 × 1x^(1-1) = -5\\n\\n**Term 4:** 1 (constant)\\n- Derivative: 0\\n\\n**Final Answer:**\\nf'(x) = 3x² + 4x - 5\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields=None, thinking_blocks=[{'type': 'thinking', 'thinking': \"I need to find the derivative of the function f(x) = x^3 + 2x^2 - 5x + 1.\\n\\nI'll use the power rule for derivatives, which states that if f(x) = x^n, then f'(x) = nx^(n-1).\\n\\nLet me take the derivative of each term:\\n\\n1) For x^3: The derivative is 3x^(3-1) = 3x^2\\n2) For 2x^2: The derivative is 2 · 2x^(2-1) = 4x\\n3) For -5x: The derivative is -5 · 1x^(1-1) = -5x^0 = -5\\n4) For the constant 1: The derivative of any constant is 0\\n\\nSo the derivative is: 3x^2 + 4x - 5 + 0 = 3x^2 + 4x - 5\", 'signature': 
'Eo4FCkYIBxgCKkDYpmV8bmbO2hMcNNjoiEju7Bq203xL5XgVRaQou4xDja7JHmRpeAKH01+uDSdMIjY6saSHwVhm9B0THnyqoBFtEgw6ZIwvUa51SCFUym8aDMi+gs0t/d1JU//qmCIwF/fycNXbo1EiddKqAmFQvsAiNcBXQ9mg8WDoXLgnSnl/upY4QNJQxklIwNqWhPloKvUDr2W6ppA3QGQlDnnUCdyDiJD04GCmNHDtdbwLBPKqVyecg5nE9APb7K/VJi2TensSBWJKN7JiZyREOTqYqIe7Ev+dPFJKn6dJb0E7Zbbq2t4FSsk/TflMFk151YRZ5oggl6Oa3wo7Lfhboe0JQbpTXfAK5ZHS0ckFrildgn8TA7Eg2Z+iuMNVn/fZaNgLIRNNTCEb0O0Vh/O/rhz52OTZm7Ro9bjeF3kxF9wYopQTFrZhljrlGgvQPWPuL0LCw6D0zKvU5ZXb9pfGT47cryJ1mwG9HUoi5q3/XE105tXq93VrjRJoWSyyg/hcZ41Vti8Gppv0yP7d0ylgc3lQe/qJPI78Y6Am62g3kgB9HKM7hGpi3Xd2ybL9NKXvMEBBUnV4IDMmjTFwwfTSl6UXrOkqoOTrQrlKxO5S/IkEkY/S6sEOEuEe6aBbiR80fM2LHPzD9SjJ/cvUeS7e5rY1cjtz6OPmpzCeJOATtgmGEUg82NW6Xv+GHONbTfmk7PGBdgTwQnByyFdbdjZbCygw6uix6kXBBKWEa/56qpWDoMms9c06iaJUhxC7HwrXKYs5+PtUMCAMmmdtnNTop5UJb688yZPb52w2M2bcGBeiKuUdYXqCf+d2A0qYreLGJYbf8PfI6LasV68DMqMOms8ukXT59PXWlyQ3GAE='}], reasoning_content=\"I need to find the derivative of the function f(x) = x^3 + 2x^2 - 5x + 1.\\n\\nI'll use the power rule for derivatives, which states that if f(x) = x^n, then f'(x) = nx^(n-1).\\n\\nLet me take the derivative of each term:\\n\\n1) For x^3: The derivative is 3x^(3-1) = 3x^2\\n2) For 2x^2: The derivative is 2 · 2x^(2-1) = 4x\\n3) For -5x: The derivative is -5 · 1x^(1-1) = -5x^0 = -5\\n4) For the constant 1: The derivative of any constant is 0\\n\\nSo the derivative is: 3x^2 + 4x - 5 + 0 = 3x^2 + 4x - 5\"))], usage=Usage(completion_tokens=433, prompt_tokens=66, total_tokens=499, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=205, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
]
},
"execution_count": null,
@@ -1803,160 +2178,104 @@
}
],
"source": [
- "chat2 = Chat(model, hist=chat.hist)\n",
- "chat2(\"What was my name again?\")"
+ "r2.value"
]
},
{
"cell_type": "markdown",
- "id": "26748132",
- "metadata": {},
- "source": [
- "### Synthetic History Creation\n",
- "\n",
- "Lets build chat history step by step. That way we can tweak anything we need to during testing."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b8ef8d88",
- "metadata": {},
- "outputs": [],
- "source": [
- "pr = \"What is 5 + 7? Use the tool to calculate it.\"\n",
- "c = Chat(model, tools=[simple_add])\n",
- "res = c(pr)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b5ef0aeb",
+ "id": "d92cdb20",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "@patch\n",
- "def print_hist(self:Chat):\n",
- " \"Print each message on a different line\"\n",
- " for r in self.hist: print(r, end='\\n\\n')"
+ "## Search"
]
},
{
"cell_type": "markdown",
- "id": "bde51fc9",
- "metadata": {},
- "source": [
- "Whereas normally without tools we would get one user input and one assistant response. Here we get two extra messages in between.\n",
- "- An assistant message requesting the tools with arguments.\n",
- "- A tool response with the result to the tool call."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "49792a9c",
+ "id": "92a7128c",
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'role': 'user', 'content': 'What is 5 + 7? Use the tool to calculate it.'}\n",
- "\n",
- "Message(content=\"I'll use the simple_add tool to calculate 5 + 7 for you.\", role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_015yBzHuirWKDN14GbYJHeyY', 'type': 'function'}], function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None})\n",
- "\n",
- "{'tool_call_id': 'toolu_015yBzHuirWKDN14GbYJHeyY', 'role': 'tool', 'name': 'simple_add', 'content': '12'}\n",
- "\n",
- "Message(content='The result of 5 + 7 is 12.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None})\n",
- "\n"
- ]
- }
- ],
"source": [
- "c.print_hist()"
+ "LiteLLM provides search, not via tools, but via the special `web_search_options` param."
]
},
{
"cell_type": "markdown",
- "id": "ab2eb0a2",
+ "id": "e76423a3",
"metadata": {},
"source": [
- "Lets try to build this up manually so we have full control over the inputs."
+ "**Note:** Not all models support web search. LiteLLM's `supports_web_search` field should indicate this, but it's unreliable for some models like `claude-sonnet-4-20250514`. Checking both `supports_web_search` and `search_context_cost_per_query` provides more accurate detection."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "a37a77b6",
+ "id": "6530af43",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def random_tool_id():\n",
- " \"Generate a random tool ID with 'toolu_' prefix\"\n",
- " random_part = ''.join(random.choices(string.ascii_letters + string.digits, k=25))\n",
- " return f'toolu_{random_part}'"
+ "def _has_search(m):\n",
+ " i = get_model_info(m)\n",
+ " return bool(i['search_context_cost_per_query'] or i['supports_web_search'])"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "f4a0bd16",
+ "id": "43417017",
"metadata": {},
"outputs": [
{
- "data": {
- "text/plain": [
- "'toolu_S7k1uH4VIIWHxve7lQvGO3lFC'"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "gemini/gemini-2.5-flash True\n",
+ "claude-sonnet-4-20250514 True\n",
+ "openai/gpt-4.1 False\n"
+ ]
}
],
"source": [
- "random_tool_id()"
+ "for m in ms: print(m, _has_search(m))"
]
},
{
"cell_type": "markdown",
- "id": "d22e52b7",
- "metadata": {},
- "source": [
- "A tool call request can contain one more or more tool calls. Lets make one."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "e00e88b4",
+ "id": "ff8611e2",
"metadata": {},
- "outputs": [],
"source": [
- "#| export\n",
- "def mk_tc(func, idx=1, **kwargs):\n",
- " args = json.dumps(kwargs)\n",
- " if callable(func): func = func.__name__\n",
- " id = random_tool_id()\n",
- " return {'index': idx, 'function': {'arguments': args, 'name': func}, 'id': id, 'type': 'function'}"
+ "When search is supported it can be used like this:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "324b9182",
+ "id": "89e0bead",
"metadata": {},
"outputs": [
{
"data": {
+ "text/markdown": [
+ "Otters are fascinating semiaquatic mammals that belong to the weasel family. The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. There are 13 species in total, ranging from the small-clawed otter to the giant otter.\n",
+ "\n",
+ "Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. Otters have the densest fur of any animal—as many as a million hairs per square inch in places. Webbed feet and powerful tails, which act like rudders, make otters strong swimmers.\n",
+ "\n",
+ "All otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters have a unique feeding behavior: A sea otter will float on its back, place a rock on its chest, then smash the mollusk down on it until it breaks open. River otters, however, can hold their breath for up to 8 minutes.\n",
+ "\n",
+ "River otters are especially playful, gamboling on land and splashing into rivers and streams. Otters live up to 16 years; they are by nature playful, and frolic in the water with their pups. When resting, sea otters entangle themselves in kelp so they don't float away.\n",
+ "\n",
+ "Many otter species face conservation challenges. Otters and their mustelid relatives were once hunted extensively for their fur, many to the point of near extinction. Despite regulations designed to protect them, many species remain at risk from pollution and habitat loss.\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=603, prompt_tokens=13314, total_tokens=13917, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), server_tool_use=ServerToolUse(web_search_requests=1), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "\n",
+ " "
+ ],
"text/plain": [
- "{'index': 1,\n",
- " 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'},\n",
- " 'id': 'toolu_XErMLIskFlArR4VIs52KijpH3',\n",
- " 'type': 'function'}"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Otters are fascinating semiaquatic mammals that belong to the weasel family. The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. There are 13 species in total, ranging from the small-clawed otter to the giant otter.\\n\\nMost are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. Otters have the densest fur of any animal—as many as a million hairs per square inch in places. Webbed feet and powerful tails, which act like rudders, make otters strong swimmers.\\n\\nAll otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters have a unique feeding behavior: A sea otter will float on its back, place a rock on its chest, then smash the mollusk down on it until it breaks open. River otters, however, can hold their breath for up to 8 minutes.\\n\\nRiver otters are especially playful, gamboling on land and splashing into rivers and streams. Otters live up to 16 years; they are by nature playful, and frolic in the water with their pups. When resting, sea otters entangle themselves in kelp so they don't float away.\\n\\nMany otter species face conservation challenges. Otters and their mustelid relatives were once hunted extensively for their fur, many to the point of near extinction. Despite regulations designed to protect them, many species remain at risk from pollution and habitat loss.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': [[{'type': 'web_search_result_location', 'cited_text': 'The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDLIIDjwxPgKn3T1dcBoMOiqvShGJLxd8+SdzIjCz3SlTIqO2HA5gvks2pAQGmz3XWB+xFAaljcWlyygSy/kTY7sqeqMn1qU4tGpBmf4qE4abQbfNHbLywouCbZ9quTv0iwgYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'There are 13 species in total, ranging from the small-clawed otter to the giant otter.', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDCXseI4G8zexyxzXlRoMpFgWRLWJhF6OCzWzIjC4h57A4xvrpWQFrK5X4R5FCgSieqLTHAhYMDpZKSfmktNjvZEhPRsh65kdvMm1/TMqE8nJZb8r81E83mBk45fzBfkGEF4YBA=='}, {'type': 'web_search_result_location', 'cited_text': 'There are 14 known species of otters, ranging in size and habitat preferences, with some species adapted to cold waters requiring a high metabolic rat...', 'url': 'https://en.wikipedia.org/wiki/Otter', 'title': 'Otter - Wikipedia', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDMBGz/TkkP7vJ/5bBBoM6OKET14S7EBuRDq5IjCdnxv7okJjoIxqiaU0kmfBEJ3V8tboSz+XFgFEGPxIyKThi6pIEgjWFQWJpwIvWMEqE24LwoVusqslEH/NExIYNSa+GdQYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDNH8Zg5oGTTIuNgWbhoM203UStaxnwT6ihm3IjCJO09/UFQsgEsQDC6HMT95Uvq5eX9qnL7+rTsmYXFAUUCDUdofHzjcOxwNHLCwXjsqEyGDArRktHJMZWjqQ75J2rJzQIUYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'Otters have the densest fur of any animal—as many as a million hairs per square inch in places. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDOM+ZAdwxG6vExmODhoMpZpQzqs8haGSfQzzIjBuCmrn97nFh1fkEDH+x5wSdfuyWZdCH4C0IOMr4IRxpMCjxixd9bdsqvkWH5BlI90qE84aNPZNvlF9U3B2yi0OFSTIjUsYBA=='}], [{'type': 'web_search_result_location', 'cited_text': 'Webbed feet and powerful tails, which act like rudders, make otters strong swimmers. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDB4SDVlqNhefbSSuzxoMeG4s1rBdtpIgK0DSIjBX3WipgCcchJ4jFkLFj4/COw6rDv4fyRw4Xe4GmR3Dpo4TCUrgwHoJmO6l8MK4kTMqFOEpHWwGbHkFVUd+Aq1NOx2lpNPRGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'All otters are expert hunters that eat fish, crustaceans, and other critters. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDBEyDl3t5gAz0iW2JxoMLzj6yl7IqCr/rkQFIjDc7DkfXM7M8ELyQrl7PgcEJwHGQ2IbW6kDSpbVdQR9q6pqQiFAonIGapSO6bOs8lcqFNxFIb6J9DOBWrepkBD9oLphplfWGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'A sea otter will float on its back, place a rock on its chest, then smash the mollusk down on it until it breaks open. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDA0ilE2w5h7YdRjMzhoMqwIS1XuE51cgr+7wIjC8Jm4aMVIY12LLXkgVX2aDuWfaz4V2YH9VxtHDFglIlgA6zNp5gE+1+M4bj2YkdLoqFKCvef+BMeVSAFwlkJtEYJYVwGvYGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'River otters, however, can hold their breath for up to 8 minutes. ', 'url': 'https://www.doi.gov/blog/12-facts-about-otters-sea-otter-awareness-week', 'title': '12 Facts About Otters for Sea Otter Awareness Week | U.S. Department of the Interior', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDBRRB8XLgZXQcvTOzxoM7ICPdtJHrGPgTjBgIjAwJzEqHlU34vJgq2xCl91sP8PUJojcyUmr3RjFmjSOCl6DSGhxaIIFMZuf2LIydG4qFOSehmf1sOfabXVUU/3zEtuYUF15GAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'River otters are especially playful, gamboling on land and splashing into rivers and streams. 
', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDB57wjzqdgmYw53MIxoMbnctVN4aNFUYI2QyIjCeO6ekN1PB/st0ohcHxwVnl7Hm2uKHdJsB6qhKHD9zcFdBHWi4bXmkITh1prJiP+EqFEDV9W/kth6i6gPvo5fFJ91A8ZgCGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'Otters live up to 16 years; they are by nature playful, and frolic in the water with their pups. ', 'url': 'https://en.wikipedia.org/wiki/Otter', 'title': 'Otter - Wikipedia', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDKANsJtkm8sPve7PxRoMBV2hF6Bv9+BRCaLdIjCbmBPNZLWyg/fl9fIqE2yzUgazsFZOzLxn1SsM+vzPaUIFNuDMDQQ2kyfRUJt3thcqFI5qd+bcbUuz7G8sVhT8+5Npf2oJGAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'When it’s time to nap, sea otters entangle themselves in kelp so they don’t float away. ', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpABCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDLGvrNLsLI08xlvgTBoM9IV7m9dO6UW4WiPQIjA+J2H71NbOFXTC9c/BSXoc/k5LWai1PWOl/RTN6DvcX7+aoNa5X8vUj0aA4b9m+VkqFPEjP3FFBjUYjR3a8FST57xIkOM5GAQ='}], [{'type': 'web_search_result_location', 'cited_text': 'Otters and their mustelid relatives were once hunted extensively for their fur, many to the point of near extinction. 
Despite regulations designed to ...', 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1', 'title': 'Otters, facts and information | National Geographic', 'encrypted_index': 'EpMBCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDGpQatyaoWKjXpMGZxoM4lzcl/OByeZqtkXVIjB1sXm55ThwnOma8DGFbuaHxzNH5TuEDVReHHdd+RSoQ4qVRlu/xPhPpK5EX5Z7iuQqF1a45PWr6Is2p49kQjUlwAmSPJJTSYlmGAQ='}]], 'thinking_blocks': None}))], usage=Usage(completion_tokens=603, prompt_tokens=13314, total_tokens=13917, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), server_tool_use=ServerToolUse(web_search_requests=1), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -1965,38 +2284,43 @@
}
],
"source": [
- "tc = mk_tc(simple_add, a=5, b=7)\n",
- "tc"
+ "smsg = mk_msg(\"Search the web and tell me very briefly about otters\")\n",
+ "r = completion(model, [smsg], web_search_options={\"search_context_size\": \"low\"}) # or 'medium' / 'high'\n",
+ "r"
]
},
{
"cell_type": "markdown",
- "id": "97da6222",
+ "id": "e1af925d",
"metadata": {},
"source": [
- "This can then be packged into the full Message object produced by the assitant."
+ "## Citations"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "436abceb",
+ "cell_type": "markdown",
+ "id": "abaeeee1",
"metadata": {},
- "outputs": [],
"source": [
- "def mk_tc_req(content, tcs): return Message(content=content, role='assistant', tool_calls=tcs, function_call=None)"
+ "Next, let's handle Anthropic's search citations.\n",
+ "\n",
+ "When not using streaming, all citations are placed in a separate key in the response:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "94c031e7",
+ "id": "cf84f8c8",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "Message(content=\"I'll use the simple_add tool to calculate 5 + 7 for you.\", role='assistant', tool_calls=[ChatCompletionMessageToolCall(index=1, function=Function(arguments='{\"a\": 5, \"b\": 7}', name='simple_add'), id='toolu_XErMLIskFlArR4VIs52KijpH3', type='function')], function_call=None, provider_specific_fields=None)"
+ "[{'type': 'web_search_result_location',\n",
+ " 'cited_text': 'The charismatic otter, a member of the weasel family, is found on every continent except Australia and Antarctica. ',\n",
+ " 'url': 'https://www.nationalgeographic.com/animals/mammals/facts/otters-1',\n",
+ " 'title': 'Otters, facts and information | National Geographic',\n",
+ " 'encrypted_index': 'Eo8BCioIBxgCIiQ4ODk4YTFkYy0yMTNkLTRhNmYtOTljYi03ZTBlNTUzZDc0NWISDLIIDjwxPgKn3T1dcBoMOiqvShGJLxd8+SdzIjCz3SlTIqO2HA5gvks2pAQGmz3XWB+xFAaljcWlyygSy/kTY7sqeqMn1qU4tGpBmf4qE4abQbfNHbLywouCbZ9quTv0iwgYBA=='}]"
]
},
"execution_count": null,
@@ -2005,44 +2329,67 @@
}
],
"source": [
- "tc_cts = \"I'll use the simple_add tool to calculate 5 + 7 for you.\"\n",
- "tcq = mk_tc_req(tc_cts, [tc])\n",
- "tcq"
+ "r.choices[0].message.provider_specific_fields['citations'][0]"
]
},
{
"cell_type": "markdown",
- "id": "0a1a0364",
+ "id": "4f2a8559",
"metadata": {},
"source": [
- "Notice how Message instantiation creates a list of ChatCompletionMessageToolCalls by default. When the tools are executed this is converted back\n",
- "to a dictionary, for consistency we want to keep these as dictionaries from the beginning."
+ "However, when streaming, the results are not captured this way.\n",
+ "Instead, we provide this helper function that adds the citation to the `content` field in markdown format:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "00cebbbb",
+ "id": "fc341e7e",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def mk_tc_req(content, tcs):\n",
- " msg = Message(content=content, role='assistant', tool_calls=tcs, function_call=None)\n",
- " msg.tool_calls = [{**dict(tc), 'function': dict(tc['function'])} for tc in msg.tool_calls]\n",
- " return msg"
+ "def cite_footnote(msg):\n",
+ " if not (delta:=nested_idx(msg, 'choices', 0, 'delta')): return\n",
+ " if citation:= nested_idx(delta, 'provider_specific_fields', 'citation'):\n",
+ " title = citation['title'].replace('\"', '\\\\\"')\n",
+ " delta.content = f'[*]({citation[\"url\"]} \"{title}\") '\n",
+ " \n",
+ "def cite_footnotes(stream_list):\n",
+ " \"Add markdown footnote citations to stream deltas\"\n",
+ " for msg in stream_list: cite_footnote(msg)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "a0d3468d",
+ "id": "c2150365",
"metadata": {},
"outputs": [
{
"data": {
+ "text/markdown": [
+ "Otters are [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") members of the weasel family, found on every continent except Australia and Antarctica. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") There are 13-14 species in total, ranging from [*](https://greatergood.org/blog/9-facts-about-the-small-clawed-otter \"9 Facts About Small-Clawed Otters And how you can save them\") the small-clawed otter (the smallest species) to [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") the giant otter and sea otter (the largest).\n",
+ "\n",
+ "[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Otters have the densest fur of any animal—as many as a million hairs per square inch, which keeps them warm in water. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") They have webbed feet and powerful tails that act like rudders, making them strong swimmers.\n",
+ "\n",
+ "[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") All otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters are famous for [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") floating on their backs and using rocks to smash open shellfish. [*](https://www.doi.gov/blog/12-facts-about-otters-sea-otter-awareness-week \"12 Facts About Otters for Sea Otter Awareness Week | U.S. Department of the Interior\") Sea otters can stay submerged for more than 5 minutes, while river otters can hold their breath for up to 8 minutes.\n",
+ "\n",
+ "[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") They are playful animals, engaging in activities like sliding and splashing. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Sea otters even entangle themselves in kelp while sleeping and sometimes hold hands with other otters to stay together.\n",
+ "\n",
+ "Many otter species face conservation challenges due to [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") historical hunting for their fur and current threats from pollution and habitat loss.\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=576, prompt_tokens=13314, total_tokens=13890, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
+ "\n",
+ " "
+ ],
"text/plain": [
- "Message(content=\"I'll use the simple_add tool to calculate 5 + 7 for you.\", role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_XErMLIskFlArR4VIs52KijpH3', 'type': 'function'}], function_call=None, provider_specific_fields=None)"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Otters are [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") members of the weasel family, found on every continent except Australia and Antarctica. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") There are 13-14 species in total, ranging from [*](https://greatergood.org/blog/9-facts-about-the-small-clawed-otter \"9 Facts About Small-Clawed Otters And how you can save them\") the small-clawed otter (the smallest species) to [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") the giant otter and sea otter (the largest).\\n\\n[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Most are small, with short ears and noses, elongated bodies, long tails, and soft, dense fur. [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Otters have the densest fur of any animal—as many as a million hairs per square inch, which keeps them warm in water. 
[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") They have webbed feet and powerful tails that act like rudders, making them strong swimmers.\\n\\n[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://www.pbs.org/wnet/nature/blog/otter-fact-sheet/ \"Otter Fact Sheet | Blog | Nature | PBS\") All otters are expert hunters that eat fish, crustaceans, and other critters. Sea otters are famous for [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") floating on their backs and using rocks to smash open shellfish. [*](https://www.doi.gov/blog/12-facts-about-otters-sea-otter-awareness-week \"12 Facts About Otters for Sea Otter Awareness Week | U.S. Department of the Interior\") Sea otters can stay submerged for more than 5 minutes, while river otters can hold their breath for up to 8 minutes.\\n\\n[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") [*](https://en.wikipedia.org/wiki/Otter \"Otter - Wikipedia\") They are playful animals, engaging in activities like sliding and splashing. 
[*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") Sea otters even entangle themselves in kelp while sleeping and sometimes hold hands with other otters to stay together.\\n\\nMany otter species face conservation challenges due to [*](https://www.nationalgeographic.com/animals/mammals/facts/otters-1 \"Otters, facts and information | National Geographic\") historical hunting for their fur and current threats from pollution and habitat loss.', role='assistant', tool_calls=[], function_call=None, provider_specific_fields=None))], usage=Usage(completion_tokens=576, prompt_tokens=13314, total_tokens=13890, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None))"
]
},
"execution_count": null,
@@ -2051,278 +2398,204 @@
}
],
"source": [
- "tcq = mk_tc_req(tc_cts, [tc])\n",
- "tcq"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b75dc3e7",
- "metadata": {},
- "outputs": [],
- "source": [
- "c = Chat(model, tools=[simple_add], hist=[pr, tcq])"
+ "r = list(completion(model, [smsg], stream=True, web_search_options={\"search_context_size\": \"low\"}))\n",
+ "cite_footnotes(r)\n",
+ "stream_chunk_builder(r)"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "bd673382",
+ "cell_type": "markdown",
+ "id": "29018310",
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'role': 'user', 'content': 'What is 5 + 7? Use the tool to calculate it.'}\n",
- "\n",
- "Message(content=\"I'll use the simple_add tool to calculate 5 + 7 for you.\", role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_XErMLIskFlArR4VIs52KijpH3', 'type': 'function'}], function_call=None, provider_specific_fields=None)\n",
- "\n"
- ]
- }
- ],
"source": [
- "c.print_hist()"
+ "# Chat"
]
},
{
"cell_type": "markdown",
- "id": "c490dcfb",
+ "id": "da9223dc",
"metadata": {},
"source": [
- "Looks good so far! Now we will want to provide the actual result!"
+ "LiteLLM is pretty bare bones. It doesn't keep track of conversation history or what tools have been added in the conversation so far.\n",
+ "\n",
+ "So let's make a Claudette-style wrapper so we can do streaming, tool calling, and tool loops without problems."
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "59e69d43",
+ "id": "a636d732",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def mk_tc_result(tc, result): return {'tool_call_id': tc['id'], 'role': 'tool', 'name': tc['function']['name'], 'content': result}"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "94067b82",
- "metadata": {},
- "source": [
- "Note we might have more than one tool call if more than one was passed in, here we just will make one result."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "175b9d78",
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "{'index': 1,\n",
- " 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'},\n",
- " 'id': 'toolu_XErMLIskFlArR4VIs52KijpH3',\n",
- " 'type': 'function'}"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "tcq.tool_calls[0]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6f969e27",
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "{'tool_call_id': 'toolu_XErMLIskFlArR4VIs52KijpH3',\n",
- " 'role': 'tool',\n",
- " 'name': 'simple_add',\n",
- " 'content': '12'}"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "mk_tc_result(tcq.tool_calls[0], '12')"
+ "effort = AttrDict({o[0]:o for o in ('low','medium','high')})"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "e5d8e695",
+ "id": "715e9a83",
"metadata": {},
"outputs": [],
"source": [
"#| export\n",
- "def mk_tc_results(tcq, results): return [mk_tc_result(a,b) for a,b in zip(tcq.tool_calls, results)]"
+ "def _mk_prefill(pf): return ModelResponseStream([StreamingChoices(delta=Delta(content=pf,role='assistant'))])"
]
},
{
"cell_type": "markdown",
- "id": "90d8c658",
- "metadata": {},
- "source": [
- "Same for here tcq.tool_calls will match the number of results passed in the results list."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "bd6e2307",
+ "id": "987fba85",
"metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "Message(content=\"I'll use the simple_add tool to calculate 5 + 7 for you.\", role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_XErMLIskFlArR4VIs52KijpH3', 'type': 'function'}], function_call=None, provider_specific_fields=None)"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
"source": [
- "tcq"
+ "When the tool uses are about to be exhausted, it is important to alert the AI so that it knows to use its final steps for communicating the current progress and next steps to the user"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "59c2f72e",
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "[{'tool_call_id': 'toolu_XErMLIskFlArR4VIs52KijpH3',\n",
- " 'role': 'tool',\n",
- " 'name': 'simple_add',\n",
- " 'content': '12'}]"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "tcr = mk_tc_results(tcq, ['12'])\n",
- "tcr"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "608b90d2",
+ "id": "e18e226c",
"metadata": {},
+ "outputs": [],
"source": [
- "Now we can call it with this synthetic data to see what the response is!"
+ "#| export\n",
+ "_final_prompt = \"You have no more tool uses. Please summarize your findings. If you did not complete your goal please tell the user what further work needs to be done so they can choose how best to proceed.\""
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "efed96b7",
+ "id": "266f3d5d",
"metadata": {},
- "outputs": [
- {
- "data": {
- "text/markdown": [
- "The result of 5 + 7 is 12.\n",
- "\n",
- "\n",
- "\n",
- "- id: `chatcmpl-xxx`\n",
- "- model: `claude-sonnet-4-20250514`\n",
- "- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=17, prompt_tokens=537, total_tokens=554, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
- "\n",
- " "
- ],
- "text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='The result of 5 + 7 is 12.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=17, prompt_tokens=537, total_tokens=554, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
- "c(tcr[0])"
+ "#| export\n",
+ "class Chat:\n",
+ " def __init__(\n",
+ " self,\n",
+ " model:str, # LiteLLM compatible model name \n",
+ " sp='', # System prompt\n",
+ " temp=0, # Temperature\n",
+ " search=False, # Search (l,m,h), if model supports it\n",
+ " tools:list=None, # Add tools\n",
+ " hist:list=None, # Chat history\n",
+ " ns:Optional[dict]=None, # Custom namespace for tool calling \n",
+ " cache=False # Anthropic prompt caching\n",
+ " ):\n",
+ " \"LiteLLM chat client.\"\n",
+ " self.model = model\n",
+ " hist,tools = mk_msgs(hist),listify(tools)\n",
+ " if ns is None and tools: ns = mk_ns(tools)\n",
+ " elif ns is None: ns = globals()\n",
+ " self.tool_schemas = [lite_mk_func(t) for t in tools] if tools else None\n",
+ " store_attr()\n",
+ " \n",
+ " def _prep_msg(self, msg=None, prefill=None):\n",
+ " \"Prepare the messages list for the API call\"\n",
+ " sp = [{\"role\": \"system\", \"content\": self.sp}] if self.sp else []\n",
+ " if msg: self.hist = mk_msgs(self.hist+[msg], cache=self.cache)\n",
+ " pf = [{\"role\":\"assistant\",\"content\":prefill}] if prefill else []\n",
+ " return sp + self.hist + pf\n",
+ "\n",
+ " def _call(self, msg=None, prefill=None, temp=None, think=None, search=None, stream=False, max_steps=2, step=1, final_prompt=None, tool_choice=None, **kwargs):\n",
+ " \"Internal method that always yields responses\"\n",
+ " if step>max_steps: return\n",
+ " if not get_model_info(self.model)[\"supports_assistant_prefill\"]: prefill=None\n",
+ " if _has_search(self.model) and (s:=ifnone(search,self.search)): kwargs['web_search_options'] = {\"search_context_size\": effort[s]}\n",
+ " else: _=kwargs.pop('web_search_options',None)\n",
+ " res = completion(model=self.model, messages=self._prep_msg(msg, prefill), stream=stream, \n",
+ " tools=self.tool_schemas, reasoning_effort = effort.get(think), tool_choice=tool_choice,\n",
+ " # temperature is not supported when reasoning\n",
+ " temperature=None if think else ifnone(temp,self.temp),\n",
+ " **kwargs)\n",
+ " if stream:\n",
+ " if prefill: yield _mk_prefill(prefill)\n",
+ " res = yield from stream_with_complete(res,postproc=cite_footnotes)\n",
+ " m = res.choices[0].message\n",
+ " if prefill: m.content = prefill + m.content\n",
+ " self.hist.append(m)\n",
+ " yield res\n",
+ "\n",
+ " if tcs := m.tool_calls:\n",
+ " tool_results=[_lite_call_func(tc, ns=self.ns) for tc in tcs]\n",
+ " self.hist+=tool_results\n",
+ " for r in tool_results: yield r\n",
+ " if step>=max_steps-1: prompt,tool_choice,search = final_prompt,'none',False\n",
+ " else: prompt = None\n",
+ " yield from self._call(\n",
+ " prompt, prefill, temp, think, search, stream, max_steps, step+1,\n",
+ " final_prompt, tool_choice, **kwargs)\n",
+ " \n",
+ " def __call__(self,\n",
+ " msg=None, # Message str, or list of multiple message parts\n",
+ " prefill=None, # Prefill AI response if model supports it\n",
+ " temp=None, # Override temp set on chat initialization\n",
+ " think=None, # Thinking (l,m,h)\n",
+ " search=None, # Override search set on chat initialization (l,m,h)\n",
+ " stream=False, # Stream results\n",
+ " max_steps=2, # Maximum number of tool calls\n",
+ "        final_prompt=_final_prompt, # Final prompt when tool calls have run out \n",
+ " return_all=False, # Returns all intermediate ModelResponses if not streaming and has tool calls\n",
+ " **kwargs):\n",
+ " \"Main call method - handles streaming vs non-streaming\"\n",
+ " result_gen = self._call(msg, prefill, temp, think, search, stream, max_steps, 1, final_prompt, **kwargs) \n",
+ " if stream: return result_gen # streaming\n",
+ " elif return_all: return list(result_gen) # toolloop behavior\n",
+ " else: return last(result_gen) # normal chat behavior"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "db8e06d6",
+ "id": "69419bb7",
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'role': 'user', 'content': 'What is 5 + 7? Use the tool to calculate it.'}\n",
- "\n",
- "Message(content=\"I'll use the simple_add tool to calculate 5 + 7 for you.\", role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_XErMLIskFlArR4VIs52KijpH3', 'type': 'function'}], function_call=None, provider_specific_fields=None)\n",
- "\n",
- "{'tool_call_id': 'toolu_XErMLIskFlArR4VIs52KijpH3', 'role': 'tool', 'name': 'simple_add', 'content': '12'}\n",
- "\n",
- "Message(content='The result of 5 + 7 is 12.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None})\n",
- "\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
- "c.print_hist()"
+ "@patch(as_prop=True)\n",
+ "def cost(self: Chat):\n",
+ " \"Total cost of all responses in conversation history\"\n",
+ " return sum(getattr(r, '_hidden_params', {}).get('response_cost') or 0\n",
+ "               for r in self.hist if hasattr(r, 'choices'))"
]
},
{
"cell_type": "markdown",
- "id": "56b6af73",
+ "id": "ce163563",
"metadata": {},
"source": [
- "Lets try this again, but lets give it something that is clearly wrong for fun."
+ "## Examples"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "01a9049c",
+ "cell_type": "markdown",
+ "id": "1dbf319a",
"metadata": {},
- "outputs": [],
"source": [
- "c = Chat(model, tools=[simple_add], hist=[pr, tcq])"
+ "### History tracking"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "59f546c0",
+ "id": "957ccd1e",
"metadata": {},
"outputs": [
{
"data": {
+ "text/markdown": [
+ "Hi Rens! Nice to meet you. How are you doing today?\n",
+ "\n",
+ "\n",
+ "\n",
+ "id: `chatcmpl-xxx`\n",
+ "- model: `claude-sonnet-4-20250514`\n",
+ "- finish_reason: `stop`\n",
+ "- usage: `Usage(completion_tokens=19, prompt_tokens=14, total_tokens=33, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "\n",
+ " "
+ ],
"text/plain": [
- "[{'tool_call_id': 'toolu_XErMLIskFlArR4VIs52KijpH3',\n",
- " 'role': 'tool',\n",
- " 'name': 'simple_add',\n",
- " 'content': '13'}]"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hi Rens! Nice to meet you. How are you doing today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=19, prompt_tokens=14, total_tokens=33, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -2331,32 +2604,33 @@
}
],
"source": [
- "tcr = mk_tc_results(tcq, ['13'])\n",
- "tcr"
+ "chat = Chat(model)\n",
+ "res = chat(\"Hey my name is Rens\")\n",
+ "res"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "f7befdf1",
+ "id": "5978da56",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "The result of 5 + 7 is 12. Wait, let me double-check that - the tool returned 13, which is incorrect. Let me verify: 5 + 7 should equal 12, but the tool returned 13. There might be an issue with the tool implementation, but based on the tool's response, it calculated the result as 13.\n",
+ "Your name is Rens! You introduced yourself to me at the start of our conversation.\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=85, prompt_tokens=537, total_tokens=622, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "- usage: `Usage(completion_tokens=22, prompt_tokens=41, total_tokens=63, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"The result of 5 + 7 is 12. Wait, let me double-check that - the tool returned 13, which is incorrect. Let me verify: 5 + 7 should equal 12, but the tool returned 13. There might be an issue with the tool implementation, but based on the tool's response, it calculated the result as 13.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=85, prompt_tokens=537, total_tokens=622, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Your name is Rens! You introduced yourself to me at the start of our conversation.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=22, prompt_tokens=41, total_tokens=63, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -2365,37 +2639,32 @@
}
],
"source": [
- "c(tcr[0])"
+ "chat(\"Whats my name\")"
]
},
{
"cell_type": "markdown",
- "id": "84387429",
- "metadata": {},
- "source": [
- "Lets make sure this works with multiple tool calls in the same assistant Message."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "027f9a15",
+ "id": "8dbb77c7",
"metadata": {},
- "outputs": [],
"source": [
- "tcs = [mk_tc(simple_add, a=5, b=7), mk_tc(simple_add, a=6, b=7)]"
+ "See now we keep track of history!\n",
+ "\n",
+ "History is stored in the `hist` attribute:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "44baa92b",
+ "id": "4a3c29d8",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "Message(content='I will calculate these for you!', role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_KU41c8z5PrGV1P7ComkRBd2bs', 'type': 'function'}, {'index': 1, 'function': {'arguments': '{\"a\": 6, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_Mfi6Oo4zBcFoFaR0PUQ0lmicP', 'type': 'function'}], function_call=None, provider_specific_fields=None)"
+ "[{'role': 'user', 'content': 'Hey my name is Rens'},\n",
+ " Message(content='Hi Rens! Nice to meet you. How are you doing today?', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}),\n",
+ " {'role': 'user', 'content': 'Whats my name'},\n",
+ " Message(content='Your name is Rens! You introduced yourself to me at the start of our conversation.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None})]"
]
},
"execution_count": null,
@@ -2404,54 +2673,39 @@
}
],
"source": [
- "tcq = mk_tc_req(\"I will calculate these for you!\", tcs)\n",
- "tcq"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2abb6a8f",
- "metadata": {},
- "outputs": [],
- "source": [
- "tcr = mk_tc_results(tcq, ['12', '13'])"
+ "chat.hist"
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "866aa31d",
+ "cell_type": "markdown",
+ "id": "8f38015b",
"metadata": {},
- "outputs": [],
"source": [
- "c = Chat(model, tools=[simple_add], hist=[pr, tcq, tcr[0]])"
+ "You can also pass an old chat history into new Chat objects:"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "5a9d9ecd",
+ "id": "d9f575f3",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
- "The results are:\n",
- "- 5 + 7 = 12\n",
- "- 6 + 7 = 13\n",
+ "Your name is Rens - you told me that when you first said hello.\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
- "- usage: `Usage(completion_tokens=31, prompt_tokens=629, total_tokens=660, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
+ "- usage: `Usage(completion_tokens=20, prompt_tokens=72, total_tokens=92, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
"\n",
" "
],
"text/plain": [
- "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='The results are:\\n- 5 + 7 = 12\\n- 6 + 7 = 13', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=31, prompt_tokens=629, total_tokens=660, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
+ "ModelResponse(id='chatcmpl-xxx', created=1000000000, model='claude-sonnet-4-20250514', object='chat.completion', system_fingerprint=None, choices=[Choices(finish_reason='stop', index=0, message=Message(content='Your name is Rens - you told me that when you first said hello.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None}))], usage=Usage(completion_tokens=20, prompt_tokens=72, total_tokens=92, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0))"
]
},
"execution_count": null,
@@ -2460,34 +2714,8 @@
}
],
"source": [
- "c(tcr[1])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "ee111193",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'role': 'user', 'content': 'What is 5 + 7? Use the tool to calculate it.'}\n",
- "\n",
- "Message(content='I will calculate these for you!', role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_KU41c8z5PrGV1P7ComkRBd2bs', 'type': 'function'}, {'index': 1, 'function': {'arguments': '{\"a\": 6, \"b\": 7}', 'name': 'simple_add'}, 'id': 'toolu_Mfi6Oo4zBcFoFaR0PUQ0lmicP', 'type': 'function'}], function_call=None, provider_specific_fields=None)\n",
- "\n",
- "{'tool_call_id': 'toolu_KU41c8z5PrGV1P7ComkRBd2bs', 'role': 'tool', 'name': 'simple_add', 'content': '12'}\n",
- "\n",
- "{'tool_call_id': 'toolu_Mfi6Oo4zBcFoFaR0PUQ0lmicP', 'role': 'tool', 'name': 'simple_add', 'content': '13'}\n",
- "\n",
- "Message(content='The results are:\\n- 5 + 7 = 12\\n- 6 + 7 = 13', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'citations': None, 'thinking_blocks': None})\n",
- "\n"
- ]
- }
- ],
- "source": [
- "c.print_hist()"
+ "chat2 = Chat(model, hist=chat.hist)\n",
+ "chat2(\"What was my name again?\")"
]
},
{
@@ -2511,7 +2739,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=109, prompt_tokens=164, total_tokens=273, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -2560,7 +2788,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=6, prompt_tokens=285, total_tokens=291, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -2655,7 +2883,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=13, prompt_tokens=11, total_tokens=24, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
@@ -2706,7 +2934,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=13, prompt_tokens=40, total_tokens=53, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
@@ -2772,7 +3000,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `gemini-2.5-flash`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=118, prompt_tokens=159, total_tokens=277, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=79, rejected_prediction_tokens=None, text_tokens=39), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=None, text_tokens=159, image_tokens=None))`\n",
@@ -2813,7 +3041,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=81, prompt_tokens=584, total_tokens=665, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -2848,7 +3076,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `gpt-4.1-2025-04-14`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=48, prompt_tokens=155, total_tokens=203, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None))`\n",
@@ -2888,14 +3116,14 @@
{
"data": {
"text/markdown": [
- "\n",
+ "Message(content=None, role='assistant', tool_calls=[{'index': 1, 'function': {'arguments': '{\"a\": 5, \"b\": 3}', 'name': 'simple_add'}, 'id': 'toolu_01QBLkGLtd85Yj8eEiFVBu4v', 'type': 'function'}], function_call=None, reasoning_content='The user is asking me to add 5 + 3. I have access to a simple_add function that can add two numbers together. Let me check the parameters:\\n- a: first operand (required) - this would be 5\\n- b: second operand (default 0, optional) - this would be 3\\n\\nI have both values needed, so I can call the function.', thinking_blocks=[{'type': 'thinking', 'thinking': 'The user is asking me to add 5 + 3. I have access to a simple_add function that can add two numbers together. Let me check the parameters:\\n- a: first operand (required) - this would be 5\\n- b: second operand (default 0, optional) - this would be 3\\n\\nI have both values needed, so I can call the function.', 'signature': 'EtcDCkYIBxgCKkBsQpKkRzBiOdz/WsOaqhfLGR5Ek0wqybtz7oGGXAeA3v3R5xaFWOGSV2FqryJ8GFGwXVwi72lf159aKHKEeFegEgznHzdwjcISQRbx+AgaDAHewzC+gZsxWSyOviIwZ5a/EK+wqk6VMo2ul4EIY62/bK54isjBzFYYyN5oAuVYdkyClXYN713Z5Blp+jKfKr4CiNIHa9Hwy/7UK6L8S4rY/YF7rJvu2kZlz8wF5W8Vvl8VNHQHgeNygMgoXAamixxGPIgB98kR5cmY6IcVym0MMmQA4r4RxcAf8NjRdyksYB3Y4mszWr6Edtnr8v4BPjr3fQtxgOQVjiEXsMKKmNVJBGikVksm3r7x/1EDJ+rs+jI6WHCUWoNOEmu5XbrAqHDf06SmkrhF9NKvVC8rqQWi9hB5LEPR8oWsgpxHivdlbVmJqT7WV+V5779TmESIvBGl4aor1SSWM0muYh3pDY9n1wSY4DQvVQPM3bavZZJo9rvH39EA093Q6zSZhBFDII+QRN7O77rjSf9XVumOF09xtsHye8HKhjsYJfvPAOV2+NEthD3jYZI+rrBEHqkyLgnNaLlExxByTNxMytXlTIc42cB1FSDOJoXzd62pPdHVGAE='}], provider_specific_fields={'citations': None, 'thinking_blocks': [{'type': 'thinking', 'thinking': 'The user is asking me to add 5 + 3. I have access to a simple_add function that can add two numbers together. Let me check the parameters:\\n- a: first operand (required) - this would be 5\\n- b: second operand (default 0, optional) - this would be 3\\n\\nI have both values needed, so I can call the function.', 'signature': 'EtcDCkYIBxgCKkBsQpKkRzBiOdz/WsOaqhfLGR5Ek0wqybtz7oGGXAeA3v3R5xaFWOGSV2FqryJ8GFGwXVwi72lf159aKHKEeFegEgznHzdwjcISQRbx+AgaDAHewzC+gZsxWSyOviIwZ5a/EK+wqk6VMo2ul4EIY62/bK54isjBzFYYyN5oAuVYdkyClXYN713Z5Blp+jKfKr4CiNIHa9Hwy/7UK6L8S4rY/YF7rJvu2kZlz8wF5W8Vvl8VNHQHgeNygMgoXAamixxGPIgB98kR5cmY6IcVym0MMmQA4r4RxcAf8NjRdyksYB3Y4mszWr6Edtnr8v4BPjr3fQtxgOQVjiEXsMKKmNVJBGikVksm3r7x/1EDJ+rs+jI6WHCUWoNOEmu5XbrAqHDf06SmkrhF9NKvVC8rqQWi9hB5LEPR8oWsgpxHivdlbVmJqT7WV+V5779TmESIvBGl4aor1SSWM0muYh3pDY9n1wSY4DQvVQPM3bavZZJo9rvH39EA093Q6zSZhBFDII+QRN7O77rjSf9XVumOF09xtsHye8HKhjsYJfvPAOV2+NEthD3jYZI+rrBEHqkyLgnNaLlExxByTNxMytXlTIc42cB1FSDOJoXzd62pPdHVGAE='}]})\n",
"\n",
"🔧 simple_add({\"a\": 5, \"b\": 3})\n",
"\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=165, prompt_tokens=455, total_tokens=620, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=81, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -2930,7 +3158,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=13, prompt_tokens=673, total_tokens=686, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -2991,7 +3219,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=551, prompt_tokens=13314, total_tokens=13865, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=None, audio_tokens=None, reasoning_tokens=0, rejected_prediction_tokens=None, text_tokens=None), prompt_tokens_details=None)`\n",
@@ -3048,7 +3276,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=96, prompt_tokens=434, total_tokens=530, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3084,7 +3312,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=88, prompt_tokens=543, total_tokens=631, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3120,7 +3348,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=89, prompt_tokens=644, total_tokens=733, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3158,7 +3386,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=68, prompt_tokens=746, total_tokens=814, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3196,7 +3424,7 @@
{
"data": {
"text/markdown": [
- "\n",
+ "Message(content=None, role='assistant', tool_calls=[{'function': {'arguments': '{\"a\": 5, \"b\": 3}', 'name': 'simple_add'}, 'id': 'call_8ZH9yZH3N9TEmaIOmV6H81mU', 'type': 'function'}, {'function': {'arguments': '{\"a\": 7, \"b\": 2}', 'name': 'simple_add'}, 'id': 'call_uDxT76wyUI8WY9ugRPwCnlb9', 'type': 'function'}], function_call=None, provider_specific_fields={'refusal': None}, annotations=[])\n",
"\n",
"🔧 simple_add({\"a\": 5, \"b\": 3})\n",
"\n",
@@ -3207,7 +3435,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `gpt-4.1-2025-04-14`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=52, prompt_tokens=110, total_tokens=162, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None))`\n",
@@ -3248,14 +3476,14 @@
{
"data": {
"text/markdown": [
- "\n",
+ "Message(content=None, role='assistant', tool_calls=[{'function': {'arguments': '{\"a\":8,\"b\":9}', 'name': 'multiply'}, 'id': 'call_Dm0jjJPQf2elRy4z7yBhOMf5', 'type': 'function'}], function_call=None, provider_specific_fields={'refusal': None}, annotations=[])\n",
"\n",
"🔧 multiply({\"a\":8,\"b\":9})\n",
"\n",
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `gpt-4.1-2025-04-14`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=17, prompt_tokens=178, total_tokens=195, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None))`\n",
@@ -3290,7 +3518,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `gpt-4.1-2025-04-14`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=55, prompt_tokens=203, total_tokens=258, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None))`\n",
@@ -3349,7 +3577,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=116, prompt_tokens=609, total_tokens=725, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3385,7 +3613,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=83, prompt_tokens=738, total_tokens=821, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3420,7 +3648,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=22, prompt_tokens=848, total_tokens=870, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3498,7 +3726,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=123, prompt_tokens=584, total_tokens=707, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3668,7 +3896,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=13, prompt_tokens=14, total_tokens=27, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3726,7 +3954,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `tool_calls`\n",
"- usage: `Usage(completion_tokens=91, prompt_tokens=424, total_tokens=515, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",
@@ -3761,7 +3989,7 @@
"\n",
"\n",
"\n",
- "- id: `chatcmpl-xxx`\n",
+ "id: `chatcmpl-xxx`\n",
"- model: `claude-sonnet-4-20250514`\n",
"- finish_reason: `stop`\n",
"- usage: `Usage(completion_tokens=30, prompt_tokens=568, total_tokens=598, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None), cache_creation_input_tokens=0, cache_read_input_tokens=0)`\n",