import os
# os.environ['ANTHROPIC_LOG'] = 'debug'
from IPython.display import display, Markdown, clear_output
from pprint import pprint
model = models[1]
model
'claude-sonnet-4-20250514'

Problem setup

Anthropic provides an interesting example of using tools to mock up a hypothetical ordering system. We’re going to take it a step further, and show how we can dramatically simplify the process, whilst completing more complex tasks.

We’ll start by defining the same mock customer/order data as in Anthropic’s example, plus create an entity relationship between customers and orders:

def _get_orders_customers():
    orders = {
        "O1": dict(id="O1", product="Widget A", quantity=2, price=19.99, status="Shipped"),
        "O2": dict(id="O2", product="Gadget B", quantity=1, price=49.99, status="Processing"),
        "O3": dict(id="O3", product="Gadget B", quantity=2, price=49.99, status="Shipped")}

    customers = {
        "C1": dict(name="John Doe", email="[email protected]", phone="123-456-7890",
                   orders=[orders['O1'], orders['O2']]),
        "C2": dict(name="Jane Smith", email="[email protected]", phone="987-654-3210",
                   orders=[orders['O3']])
    }
    return orders, customers
orders, customers = _get_orders_customers()

We can now define the same functions from the original example – but note that we don’t need to manually create the large JSON schema, since Claudette handles all that for us automatically from the functions directly. We’ll add some extra functionality to update order details when cancelling too.

def get_customer_info(
    customer_id:str # ID of the customer
): # Customer's name, email, phone number, and list of orders
    "Retrieves a customer's information and their orders based on the customer ID"
    print(f'- Retrieving customer {customer_id}')
    # Unknown IDs return a plain string so Claude gets a readable tool result rather than an error.
    if customer_id in customers: return customers[customer_id]
    return "Customer not found"

def get_order_details(
    order_id:str # ID of the order
): # Order's ID, product name, quantity, price, and order status
    "Retrieves the details of a specific order based on the order ID"
    print(f'- Retrieving order {order_id}')
    # Unknown IDs return a plain string so Claude gets a readable tool result rather than an error.
    if order_id in orders: return orders[order_id]
    return "Order not found"

def cancel_order(
    order_id:str # ID of the order to cancel
)->bool: # True if the cancellation is successful
    "Cancels an order based on the provided order ID"
    print(f'- Cancelling order {order_id}')
    order = orders.get(order_id)
    if order is None: return False
    # Mutates the shared order dict in place, so the change is visible via get_order_details too.
    order['status'] = 'Cancelled'
    return True

We’re now ready to start our chat.

Manual tool use

tools = [get_customer_info, get_order_details, cancel_order]
chat = Chat(model, tools=tools)

We’ll start with the same request as Anthropic showed:

r = chat('Can you tell me the email address for customer C1?')
print(r.stop_reason)
r.content
- Retrieving customer C1
tool_use
[TextBlock(citations=None, text="I'll retrieve the customer information for customer C1 to get their email address.", type='text'),
 ToolUseBlock(id='toolu_019rAXs9PboQVMYoHjryFpEa', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]

Claude asks us to use a tool. Claudette handles that automatically by just calling it again:

r = chat()
contents(r)
'The email address for customer C1 (John Doe) is [email protected].'

Let’s consider a more complex case than in the original example – what happens if a customer wants to cancel all of their orders?

chat = Chat(model, tools=tools)
r = chat('Please cancel all orders for customer C1 for me.')
print(r.stop_reason)
r.content
- Retrieving customer C1
tool_use
[TextBlock(citations=None, text="I'll help you cancel all orders for customer C1. First, let me retrieve the customer's information to see what orders they have.", type='text'),
 ToolUseBlock(id='toolu_01STdDTogehYVgbQjVye246X', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]

Tool loop

This is the start of a multi-stage tool use process. Doing it manually step by step is inconvenient, so let’s write a function to handle this for us:


source

Chat.toolloop

 Chat.toolloop (pr, max_steps=10, cont_func:<built-
                infunctioncallable>=<function noop>, final_prompt='You
                have no more tool uses. Please summarize your findings. If
                you did not complete your goal please tell the user what
                further work needs to be done so they can choose how best
                to proceed.', temp=None, maxtok=4096, maxthinktok=0,
                stream=False, prefill='', tool_choice:Optional[dict]=None)

Add prompt pr to dialog and get a response from Claude, automatically following up with tool_use messages

Type Default Details
pr Prompt to pass to Claude
max_steps int 10 Maximum number of tool requests to loop through
cont_func callable noop Function that stops loop if returns False
final_prompt str You have no more tool uses. Please summarize your findings. If you did not complete your goal please tell the user what further work needs to be done so they can choose how best to proceed. Prompt to add if last message is a tool call
temp NoneType None Temperature
maxtok int 4096 Maximum tokens
maxthinktok int 0 Maximum thinking tokens
stream bool False Stream response?
prefill str Optional prefill to pass to Claude as start of its response
tool_choice Optional None Optionally force use of some tool
Exported source
_final_prompt = "You have no more tool uses. Please summarize your findings. If you did not complete your goal please tell the user what further work needs to be done so they can choose how best to proceed."
Exported source
@patch
@delegates(Chat.__call__)
def toolloop(self:Chat,
             pr, # Prompt to pass to Claude
             max_steps=10, # Maximum number of tool requests to loop through
             cont_func:callable=noop, # Function that stops loop if returns False
             final_prompt=_final_prompt, # Prompt to add if last message is a tool call
             **kwargs):
    "Add prompt `pr` to dialog and get a response from Claude, automatically following up with `tool_use` messages"
    # `save_iter` wraps the generator so the loop's messages are exposed on `.value`
    # (set at the end of `_f`) once iteration completes.
    @save_iter
    def _f(o):
        init_n = len(self.h)  # history length before this prompt -- used to slice out only this loop's messages
        r = self(pr, **kwargs)  # initial call; Chat.__call__ also executes any tool Claude requested
        yield r
        # When a tool ran, `self.last` has a 2nd entry: the tool_result message sent back -- yield it too.
        if len(self.last)>1: yield self.last[1]
        for i in range(max_steps-1):
            if self.c.stop_reason!='tool_use': break  # Claude is done asking for tools
            # On the last permitted step, send `final_prompt` so Claude summarizes instead of
            # requesting yet another tool call; otherwise continue with no new prompt.
            r = self(final_prompt if i==max_steps-2 else None, **kwargs)
            yield r
            if len(self.last)>1: yield self.last[1]
            # User-supplied early-exit hook; gets the trailing 3 history messages, stops loop on falsy return.
            if not cont_func(*self.h[-3:]): break
        o.value = self.h[init_n+1:]  # everything after the initial user prompt, i.e. the full tool-loop exchange
    return _f()

toolloop returns an iterable of assistant messages:

chat = Chat(model, tools=tools)
pr = 'Can you tell me the email address for customer C1?'
r = chat.toolloop(pr)
for o in r: display(o)
- Retrieving customer C1

I’ll retrieve the customer information for customer C1 to get their email address.

  • id: msg_01VV3vN75moJNCJHk3JTuB4U
  • content: [{'citations': None, 'text': "I'll retrieve the customer information for customer C1 to get their email address.", 'type': 'text'}, {'id': 'toolu_017mahzsFc7DQvNrqhiC1znY', 'input': {'customer_id': 'C1'}, 'name': 'get_customer_info', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 574, 'output_tokens': 76, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': "{'name': 'John Doe', 'email': '[email protected]', "
                            "'phone': '123-456-7890', 'orders': [{'id': 'O1', "
                            "'product': 'Widget A', 'quantity': 2, 'price': "
                            "19.99, 'status': 'Shipped'}, {'id': 'O2', "
                            "'product': 'Gadget B', 'quantity': 1, 'price': "
                            "49.99, 'status': 'Processing'}]}",
                 'tool_use_id': 'toolu_017mahzsFc7DQvNrqhiC1znY',
                 'type': 'tool_result'}],
  'role': 'user'}

The email address for customer C1 (John Doe) is [email protected].

  • id: msg_01XjFCU7qp73AWKM3G1hwKwK
  • content: [{'citations': None, 'text': 'The email address for customer C1 (John Doe) is [email protected].', 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 787, 'output_tokens': 23, 'server_tool_use': None, 'service_tier': 'standard'}

The full set of tool loop messages is stored in the value attr:

pprint(r.value, width=120)
[{'content': [{'citations': None,
               'text': "I'll retrieve the customer information for customer C1 to get their email address.",
               'type': 'text'},
              {'id': 'toolu_017mahzsFc7DQvNrqhiC1znY',
               'input': {'customer_id': 'C1'},
               'name': 'get_customer_info',
               'type': 'tool_use'}],
  'role': 'assistant'},
 {'content': [{'content': "{'name': 'John Doe', 'email': '[email protected]', 'phone': '123-456-7890', 'orders': "
                          "[{'id': 'O1', 'product': 'Widget A', 'quantity': 2, 'price': 19.99, 'status': 'Shipped'}, "
                          "{'id': 'O2', 'product': 'Gadget B', 'quantity': 1, 'price': 49.99, 'status': "
                          "'Processing'}]}",
               'tool_use_id': 'toolu_017mahzsFc7DQvNrqhiC1znY',
               'type': 'tool_result'}],
  'role': 'user'},
 {'content': [{'citations': None,
               'text': 'The email address for customer C1 (John Doe) is [email protected].',
               'type': 'text'}],
  'role': 'assistant'}]

Let’s see if it can handle the multi-stage process now:

orders, customers = _get_orders_customers()
chat = Chat(model, tools=tools)
r = chat.toolloop('Please cancel all orders for customer C1 for me.')
for o in r: display(o)
- Retrieving customer C1

I’ll help you cancel all orders for customer C1. First, let me retrieve the customer’s information to see what orders they have.

  • id: msg_01TjqkvsVgbNq5PsgaaMRhFo
  • content: [{'citations': None, 'text': "I'll help you cancel all orders for customer C1. First, let me retrieve the customer's information to see what orders they have.", 'type': 'text'}, {'id': 'toolu_0167mnpJ8gmL7p73b89TGqWG', 'input': {'customer_id': 'C1'}, 'name': 'get_customer_info', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 574, 'output_tokens': 87, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': "{'name': 'John Doe', 'email': '[email protected]', "
                            "'phone': '123-456-7890', 'orders': [{'id': 'O1', "
                            "'product': 'Widget A', 'quantity': 2, 'price': "
                            "19.99, 'status': 'Shipped'}, {'id': 'O2', "
                            "'product': 'Gadget B', 'quantity': 1, 'price': "
                            "49.99, 'status': 'Processing'}]}",
                 'tool_use_id': 'toolu_0167mnpJ8gmL7p73b89TGqWG',
                 'type': 'tool_result'}],
  'role': 'user'}
- Cancelling order O1
- Cancelling order O2

I can see that customer C1 (John Doe) has 2 orders: - Order O1: Widget A (Status: Shipped) - Order O2: Gadget B (Status: Processing)

Now I’ll proceed to cancel both orders:

  • id: msg_018FQ1fsGm6to67UfDpgpmAj
  • content: [{'citations': None, 'text': "I can see that customer C1 (John Doe) has 2 orders:\n- Order O1: Widget A (Status: Shipped)\n- Order O2: Gadget B (Status: Processing)\n\nNow I'll proceed to cancel both orders:", 'type': 'text'}, {'id': 'toolu_01E523cjMwqaygCdR3Mrwdoq', 'input': {'order_id': 'O1'}, 'name': 'cancel_order', 'type': 'tool_use'}, {'id': 'toolu_01CoiPgn6hXgbG7mnu8eUZV2', 'input': {'order_id': 'O2'}, 'name': 'cancel_order', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 798, 'output_tokens': 154, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': 'True',
                 'tool_use_id': 'toolu_01E523cjMwqaygCdR3Mrwdoq',
                 'type': 'tool_result'},
               { 'content': 'True',
                 'tool_use_id': 'toolu_01CoiPgn6hXgbG7mnu8eUZV2',
                 'type': 'tool_result'}],
  'role': 'user'}

Perfect! I have successfully canceled all orders for customer C1 (John Doe):

Order O1 (Widget A) - Canceled successfully ✅ Order O2 (Gadget B) - Canceled successfully

Both orders have been canceled. The customer will likely receive confirmation of these cancellations via email at [email protected].

  • id: msg_01Q75yLyHgMaby4t9oxWGUdx
  • content: [{'citations': None, 'text': 'Perfect! I have successfully canceled all orders for customer C1 (John Doe):\n\n✅ **Order O1** (Widget A) - Canceled successfully\n✅ **Order O2** (Gadget B) - Canceled successfully\n\nBoth orders have been canceled. The customer will likely receive confirmation of these cancellations via email at [email protected].', 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1017, 'output_tokens': 84, 'server_tool_use': None, 'service_tier': 'standard'}

OK Claude thinks the orders were cancelled – let’s check one:

for o in chat.toolloop('What is the status of order O2?'): display(o)
- Retrieving order O2

Let me check the current status of order O2 for you.

  • id: msg_01FkGB9JasAUAqzd2gHR71TP
  • content: [{'citations': None, 'text': 'Let me check the current status of order O2 for you.', 'type': 'text'}, {'id': 'toolu_019JS4bHS6FeF9F6yxR4FYGh', 'input': {'order_id': 'O2'}, 'name': 'get_order_details', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1113, 'output_tokens': 73, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': "{'id': 'O2', 'product': 'Gadget B', 'quantity': "
                            "1, 'price': 49.99, 'status': 'Cancelled'}",
                 'tool_use_id': 'toolu_019JS4bHS6FeF9F6yxR4FYGh',
                 'type': 'tool_result'}],
  'role': 'user'}

The status of order O2 is Cancelled.

This confirms that the cancellation we performed earlier was successful. The order for 1 Gadget B (priced at $49.99) has been officially cancelled in the system.

  • id: msg_01BSMFsF84fXzLq4zYWkSPHw
  • content: [{'citations': None, 'text': 'The status of order O2 is **Cancelled**. \n\nThis confirms that the cancellation we performed earlier was successful. The order for 1 Gadget B (priced at $49.99) has been officially cancelled in the system.', 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1242, 'output_tokens': 57, 'server_tool_use': None, 'service_tier': 'standard'}

Let’s see what happens if we run out of tool loop steps:

def mydiv(a:float, b:float):
    "Divide two numbers"
    return a / b
chat = Chat(model, tools=[mydiv])
r = chat.toolloop('Please calculate this sequence using your tools: 43/23454; 652/previous result; 6843/previous result; 321/previous result', max_steps=2)
for o in r: display(o)

I’ll calculate this sequence step by step using the division tool.

  • id: msg_014ytnAnjAkCuFzWh7AWdvGw
  • content: [{'citations': None, 'text': "I'll calculate this sequence step by step using the division tool.", 'type': 'text'}, {'id': 'toolu_01X7EjyKGUSRZeeqvwgeLzRA', 'input': {'a': 43, 'b': 23454}, 'name': 'mydiv', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 424, 'output_tokens': 84, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': '0.001833375969983798',
                 'tool_use_id': 'toolu_01X7EjyKGUSRZeeqvwgeLzRA',
                 'type': 'tool_result'}],
  'role': 'user'}

I was able to complete the first two steps of your sequence:

Completed calculations: 1. 43 ÷ 23454 = 0.001833375969983798 2. 652 ÷ 0.001833375969983798 = 355628.0930232558

Remaining work needed: I was unable to complete the full sequence due to tool usage limits. To finish the calculation, you would need:

  1. 6843 ÷ 355628.0930232558 (divide 6843 by the result from step 2)
  2. 321 ÷ [result from step 3] (divide 321 by the result from step 3)

You can either: - Ask me to continue with the remaining calculations in a new conversation - Use a calculator to complete steps 3 and 4 with the intermediate result I provided (355628.0930232558) - Use any other calculation method you prefer

The intermediate result from step 2 (355628.0930232558) is what you’ll need to continue the sequence.

  • id: msg_015HHvQz1DxpcyybhTuQ3Pfe
  • content: [{'citations': None, 'text': "I was able to complete the first two steps of your sequence:\n\n**Completed calculations:**\n1. 43 ÷ 23454 = 0.001833375969983798\n2. 652 ÷ 0.001833375969983798 = 355628.0930232558\n\n**Remaining work needed:**\nI was unable to complete the full sequence due to tool usage limits. To finish the calculation, you would need:\n\n3. 6843 ÷ 355628.0930232558 (divide 6843 by the result from step 2)\n4. 321 ÷ [result from step 3] (divide 321 by the result from step 3)\n\nYou can either:\n- Ask me to continue with the remaining calculations in a new conversation\n- Use a calculator to complete steps 3 and 4 with the intermediate result I provided (355628.0930232558)\n- Use any other calculation method you prefer\n\nThe intermediate result from step 2 (355628.0930232558) is what you'll need to continue the sequence.", 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 681, 'output_tokens': 256, 'server_tool_use': None, 'service_tier': 'standard'}
chat = Chat(model, tools=mydiv)
r = chat.toolloop('Try dividing 1 by 0 and see what the error result is')
for o in r: display(o)

I’ll try dividing 1 by 0 to see what error occurs:

  • id: msg_011MBirvdFcg47TTeCuWsmLJ
  • content: [{'citations': None, 'text': "I'll try dividing 1 by 0 to see what error occurs:", 'type': 'text'}, {'id': 'toolu_01KDFpfCuiqaK7kcrcDnJSMk', 'input': {'a': 1, 'b': 0}, 'name': 'mydiv', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 409, 'output_tokens': 88, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': 'Traceback (most recent call last):\n'
                            '  File '
                            '"/Users/jhoward/aai-ws/toolslm/toolslm/funccall.py", '
                            'line 203, in call_func\n'
                            '    try: return func(**fc_inputs)\n'
                            '                ^^^^^^^^^^^^^^^^^\n'
                            '  File '
                            '"/Users/jhoward/aai-ws/claudette/claudette/core.py", '
                            'line 394, in wrapper\n'
                            '    return func(*new_args, **new_kwargs)\n'
                            '           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n'
                            '  File '
                            '"/var/folders/51/b2_szf2945n072c0vj2cyty40000gn/T/ipykernel_90461/246724137.py", '
                            'line 3, in mydiv\n'
                            '    return a / b\n'
                            '           ~~^~~\n'
                            'ZeroDivisionError: division by zero\n',
                 'tool_use_id': 'toolu_01KDFpfCuiqaK7kcrcDnJSMk',
                 'type': 'tool_result'}],
  'role': 'user'}

As expected, dividing 1 by 0 resulted in a ZeroDivisionError: division by zero error. This is the standard Python error that occurs when attempting to divide any number by zero, since division by zero is mathematically undefined.

The error traceback shows that the error occurred in the mydiv function at the line return a / b when a=1 and b=0.

  • id: msg_0115qq9fZWePxQ9ronWCwqGc
  • content: [{'citations': None, 'text': 'As expected, dividing 1 by 0 resulted in aZeroDivisionError: division by zeroerror. This is the standard Python error that occurs when attempting to divide any number by zero, since division by zero is mathematically undefined.\n\nThe error traceback shows that the error occurred in themydivfunction at the linereturn a / bwhena=1andb=0.', 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 701, 'output_tokens': 96, 'server_tool_use': None, 'service_tier': 'standard'}

Streaming

orders, customers = _get_orders_customers()
chat = Chat(model, tools=tools)
r = chat.toolloop('Please cancel all orders for customer C1 for me.', stream=True)
for o in r:
    if isinstance(o, (dict,Message,list)): print(o)
    else: 
        for x in o: print(x, end='')
        display(o.value)
I'll help you cancel all orders for customer C1. First, let me retrieve the customer's information to see what orders they have.- Retrieving customer C1

I’ll help you cancel all orders for customer C1. First, let me retrieve the customer’s information to see what orders they have.

  • id: msg_01Gg62KzLgFJjcSPpwuk9DFE
  • content: [{'citations': None, 'text': "I'll help you cancel all orders for customer C1. First, let me retrieve the customer's information to see what orders they have.", 'type': 'text'}, {'id': 'toolu_013TNQL5k3i3kMHNzTMCMF6p', 'input': {'customer_id': 'C1'}, 'name': 'get_customer_info', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 574, 'output_tokens': 87, 'server_tool_use': None, 'service_tier': 'standard'}
{'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_013TNQL5k3i3kMHNzTMCMF6p', 'content': "{'name': 'John Doe', 'email': '[email protected]', 'phone': '123-456-7890', 'orders': [{'id': 'O1', 'product': 'Widget A', 'quantity': 2, 'price': 19.99, 'status': 'Shipped'}, {'id': 'O2', 'product': 'Gadget B', 'quantity': 1, 'price': 49.99, 'status': 'Processing'}]}"}]}
I can see that customer C1 (John Doe) has 2 orders:
- Order O1: Widget A (Status: Shipped)
- Order O2: Gadget B (Status: Processing)

Now I'll proceed to cancel both orders:- Cancelling order O1
- Cancelling order O2

I can see that customer C1 (John Doe) has 2 orders: - Order O1: Widget A (Status: Shipped) - Order O2: Gadget B (Status: Processing)

Now I’ll proceed to cancel both orders:

  • id: msg_01Sp16r7M2ZhjBhUtiGEZaB3
  • content: [{'citations': None, 'text': "I can see that customer C1 (John Doe) has 2 orders:\n- Order O1: Widget A (Status: Shipped)\n- Order O2: Gadget B (Status: Processing)\n\nNow I'll proceed to cancel both orders:", 'type': 'text'}, {'id': 'toolu_01WwAqP3LS4T7c5cn4fyR5FN', 'input': {'order_id': 'O1'}, 'name': 'cancel_order', 'type': 'tool_use'}, {'id': 'toolu_0142oSf7LRDaoaz6Rza6tZmo', 'input': {'order_id': 'O2'}, 'name': 'cancel_order', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 798, 'output_tokens': 154, 'server_tool_use': None, 'service_tier': 'standard'}
{'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01WwAqP3LS4T7c5cn4fyR5FN', 'content': 'True'}, {'type': 'tool_result', 'tool_use_id': 'toolu_0142oSf7LRDaoaz6Rza6tZmo', 'content': 'True'}]}
Perfect! I have successfully cancelled all orders for customer C1 (John Doe):

✅ **Order O1** (Widget A) - Cancelled successfully
✅ **Order O2** (Gadget B) - Cancelled successfully

Both orders have been cancelled as requested. The customer will likely receive confirmation of these cancellations via email at [email protected].

Perfect! I have successfully cancelled all orders for customer C1 (John Doe):

Order O1 (Widget A) - Cancelled successfully ✅ Order O2 (Gadget B) - Cancelled successfully

Both orders have been cancelled as requested. The customer will likely receive confirmation of these cancellations via email at [email protected].

  • id: msg_01JM3p2WgVXYrLVdqqSrHa8A
  • content: [{'citations': None, 'text': 'Perfect! I have successfully cancelled all orders for customer C1 (John Doe):\n\n✅ **Order O1** (Widget A) - Cancelled successfully\n✅ **Order O2** (Gadget B) - Cancelled successfully\n\nBoth orders have been cancelled as requested. The customer will likely receive confirmation of these cancellations via email at [email protected].', 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1017, 'output_tokens': 86, 'server_tool_use': None, 'service_tier': 'standard'}

Async tool loop


source

AsyncChat.toolloop

 AsyncChat.toolloop (pr, max_steps=10, cont_func:<built-
                     infunctioncallable>=<function noop>,
                     final_prompt='You have no more tool uses. Please
                     summarize your findings. If you did not complete your
                     goal please tell the user what further work needs to
                     be done so they can choose how best to proceed.',
                     temp=None, maxtok=4096, maxthinktok=0, stream=False,
                     prefill='',
                     tool_choice:Union[str,bool,dict,NoneType]=None)

Add prompt pr to dialog and get a response from Claude, automatically following up with tool_use messages

Type Default Details
pr Prompt to pass to Claude
max_steps int 10 Maximum number of tool requests to loop through
cont_func callable noop Function that stops loop if returns False
final_prompt str You have no more tool uses. Please summarize your findings. If you did not complete your goal please tell the user what further work needs to be done so they can choose how best to proceed. Prompt to add if last message is a tool call
temp NoneType None Temperature
maxtok int 4096 Maximum tokens
maxthinktok int 0 Maximum thinking tokens
stream bool False Stream response?
prefill str Optional prefill to pass to Claude as start of its response
tool_choice Union None Optionally force use of some tool
Exported source
@patch
@delegates(AsyncChat.__call__)
def toolloop(
    self: AsyncChat,
    pr, # Prompt to pass to Claude
    max_steps=10, # Maximum number of tool requests to loop through
    cont_func: callable = noop, # Function that stops loop if returns False
    final_prompt = _final_prompt, # Prompt to add if last message is a tool call
    **kwargs
):
    "Add prompt `pr` to dialog and get a response from Claude, automatically following up with `tool_use` messages"
    # Async twin of Chat.toolloop: same loop, but awaits each call and yields via an async generator.
    @save_iter
    async def _f(o):
        init_n = len(self.h)  # history length before this prompt -- used to slice out only this loop's messages
        r = await self(pr, **kwargs)  # initial call; tool execution happens inside AsyncChat.__call__
        yield r
        # When a tool ran, `self.last` has a 2nd entry: the tool_result message sent back -- yield it too.
        if len(self.last)>1: yield self.last[1]
        for i in range(max_steps-1):
            if self.c.stop_reason != 'tool_use': break  # Claude is done asking for tools
            # On the last permitted step, send `final_prompt` so Claude summarizes instead of
            # requesting yet another tool call; otherwise continue with no new prompt.
            r = await self(final_prompt if i==max_steps-2 else None, **kwargs)
            yield r
            if len(self.last)>1: yield self.last[1]
            # NOTE(review): cont_func is called synchronously here, same as the sync version -- an
            # async cont_func would not be awaited.
            if not cont_func(*self.h[-3:]): break
        o.value = self.h[init_n+1:]  # everything after the initial user prompt, i.e. the full tool-loop exchange
    return _f()
orders, customers = _get_orders_customers()
tools = [get_customer_info, get_order_details, cancel_order]
chat = AsyncChat(model, tools=tools)
r = chat.toolloop('Can you tell me the email address for customer C1?')
async for o in r: print(o)
- Retrieving customer C1
Message(id='msg_01Jr84hU9wbSBZaQUtnkc3a3', content=[TextBlock(citations=None, text="I'll retrieve the customer information for customer C1 to get their email address.", type='text'), ToolUseBlock(id='toolu_01BD2BMz2YyiVZySLsK9Tv3y', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')], model='claude-sonnet-4-20250514', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 574; Out: 76; Cache create: 0; Cache read: 0; Total Tokens: 650; Search: 0)
{'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01BD2BMz2YyiVZySLsK9Tv3y', 'content': "{'name': 'John Doe', 'email': '[email protected]', 'phone': '123-456-7890', 'orders': [{'id': 'O1', 'product': 'Widget A', 'quantity': 2, 'price': 19.99, 'status': 'Shipped'}, {'id': 'O2', 'product': 'Gadget B', 'quantity': 1, 'price': 49.99, 'status': 'Processing'}]}"}]}
Message(id='msg_01ST8pdtCkr87MYiNNTmREM4', content=[TextBlock(citations=None, text='The email address for customer C1 (John Doe) is [email protected].', type='text')], model='claude-sonnet-4-20250514', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 787; Out: 23; Cache create: 0; Cache read: 0; Total Tokens: 810; Search: 0)
pprint(r.value)
[{'content': [{'citations': None,
               'text': "I'll retrieve the customer information for customer C1 "
                       'to get their email address.',
               'type': 'text'},
              {'id': 'toolu_01BD2BMz2YyiVZySLsK9Tv3y',
               'input': {'customer_id': 'C1'},
               'name': 'get_customer_info',
               'type': 'tool_use'}],
  'role': 'assistant'},
 {'content': [{'content': "{'name': 'John Doe', 'email': '[email protected]', "
                          "'phone': '123-456-7890', 'orders': [{'id': 'O1', "
                          "'product': 'Widget A', 'quantity': 2, 'price': "
                          "19.99, 'status': 'Shipped'}, {'id': 'O2', "
                          "'product': 'Gadget B', 'quantity': 1, 'price': "
                          "49.99, 'status': 'Processing'}]}",
               'tool_use_id': 'toolu_01BD2BMz2YyiVZySLsK9Tv3y',
               'type': 'tool_result'}],
  'role': 'user'},
 {'content': [{'citations': None,
               'text': 'The email address for customer C1 (John Doe) is '
                       '[email protected].',
               'type': 'text'}],
  'role': 'assistant'}]

Code interpreter

Here is an example of using toolloop to implement a simple code interpreter with additional tools.

from toolslm.shell import get_shell
from fastcore.meta import delegates
import traceback
@delegates()
class CodeChat(Chat):
    "A `Chat` subclass with a persistent IPython shell exposed to Claude via a `run_cell` tool."
    # Comma-separated stdlib modules pre-imported into the shell, and listed in the system prompt.
    imps = 'os, warnings, time, json, re, math, collections, itertools, functools, dateutil, datetime, string, types, copy, pprint, enum, numbers, decimal, fractions, random, operator, typing, dataclasses'
    def __init__(self, model: Optional[str] = None, ask:bool=True, **kwargs):
        super().__init__(model=model, **kwargs)
        self.ask = ask  # if True, run_cell prompts the user for permission before executing code
        self.tools.append(self.run_cell)  # register the bound method as a tool alongside any passed in kwargs
        self.shell = get_shell()  # persistent IPython session -- state survives across run_cell calls
        self.shell.run_cell('import '+self.imps)  # pre-import the modules promised in `imps`

We have one additional parameter to creating a CodeChat beyond what we pass to Chat, which is ask – if that’s True, we’ll prompt the user before running code.

@patch
def run_cell(
    self:CodeChat,
    code:str,   # Code to execute in persistent IPython session
)->str:
    """Asks user for permission, and if provided, executes python `code` using persistent IPython session.
    Returns: Result of expression on last line (if exists); '#DECLINED#' if user declines request to execute"""
    confirm = f'Press Enter to execute, or enter "n" to skip?\n```\n{code}\n```\n'
    # Any non-empty response (e.g. "n") declines execution.
    if self.ask and input(confirm): return '#DECLINED#'
    # The exception binding was unused, so catch without naming it; the
    # formatted traceback is returned so the model can see what went wrong.
    try: res = self.shell.run_cell(code)
    except Exception: return traceback.format_exc()
    # Claude often prints instead of leaving a final expression, so fall back
    # to captured stdout when there is no result value.
    return res.stdout if res.result is None else res.result

We just pass along requests to run code to the shell’s implementation. Claude often prints results instead of just using the last expression, so we capture stdout in those cases.

# System prompt for the code-interpreter chat: discourages unnecessary tool use,
# lists the pre-imported modules, and defines the stop-on-'#DECLINED#' protocol.
sp = f'''You are a knowledgable assistant. Do not use tools unless needed.
Don't do complex calculations yourself -- use code for them.
The following modules are pre-imported for `run_cell` automatically:

{CodeChat.imps}

Never mention what tools you are using. Note that `run_cell` interpreter state is *persistent* across calls.

If a tool returns `#DECLINED#` report to the user that the attempt was declined and no further progress can be made.
In that case, do *not* attempt to run any further code -- stop execution *IMMEDIATELY* and tell the user it was declined.

When using a tool, *ALWAYS* before every use of every tool, tell the user what you will be doing and why.'''
def get_user()->str:
    "Get the username of the user running this session"
    # Mock tool for the demo: log the lookup, then hand back a fixed username.
    print("Looking up username")
    username = 'Jeremy'
    return username

In order to test out multi-stage tool use, we create a mock function that Claude can call to get the current username.

# Build the code-interpreter chat: second available model, the mock `get_user`
# tool, our system prompt, confirmation prompts enabled, and low temperature.
model = models[1]
chat = CodeChat(model, tools=[get_user], sp=sp, ask=True, temp=0.3)

Providing a callable to toolloop’s trace_func lets us print out information during the loop:

toolloop’s cont_func callable lets us provide a function which, if it returns False, stops the loop:

def _cont_decline(call, resp, asst): return resp['content'][0]['content'] != '#DECLINED#'

Now we can try our code interpreter. We start by asking for a function to be created, which we’ll use in the next prompt to test that the interpreter is persistent.

# First prompt: define `checksum`, so the next prompt can show that the
# interpreter state persists between toolloop calls.
pr = '''Create a 1-line function `checksum` for a string `s`,
that multiplies together the ascii values of each character in `s` using `reduce`.'''
for o in chat.toolloop(pr, cont_func=_cont_decline): display(o)
Press Enter to execute, or enter "n" to skip?
```
# Create the 1-line checksum function using reduce
checksum = lambda s: functools.reduce(operator.mul, (ord(c) for c in s), 1)

# Test the function with a few examples
print("Testing checksum function:")
print(f'checksum("abc") = {checksum("abc")}')
print(f'checksum("hello") = {checksum("hello")}')
print(f'checksum("A") = {checksum("A")}')
print(f'checksum("") = {checksum("")}')  # Empty string case

# Let's verify the calculation manually for "abc"
print(f"\nManual verification for 'abc':")
print(f"ord('a') = {ord('a')}, ord('b') = {ord('b')}, ord('c') = {ord('c')}")
print(f"{ord('a')} * {ord('b')} * {ord('c')} = {ord('a') * ord('b') * ord('c')}")
```

I’ll create a 1-line checksum function that multiplies the ASCII values of characters in a string using reduce.

  • id: msg_01AqjUWP4hLf5Zea23ddSzuX
  • content: [{'citations': None, 'text': "I'll create a 1-line checksum function that multiplies the ASCII values of characters in a string usingreduce.", 'type': 'text'}, {'id': 'toolu_017Ku7oUiMbVKLehAwJkxohD', 'input': {'code': '# Create the 1-line checksum function using reduce\nchecksum = lambda s: functools.reduce(operator.mul, (ord(c) for c in s), 1)\n\n# Test the function with a few examples\nprint("Testing checksum function:")\nprint(f\'checksum("abc") = {checksum("abc")}\')\nprint(f\'checksum("hello") = {checksum("hello")}\')\nprint(f\'checksum("A") = {checksum("A")}\')\nprint(f\'checksum("") = {checksum("")}\') # Empty string case\n\n# Let\'s verify the calculation manually for "abc"\nprint(f"\\nManual verification for \'abc\':")\nprint(f"ord(\'a\') = {ord(\'a\')}, ord(\'b\') = {ord(\'b\')}, ord(\'c\') = {ord(\'c\')}")\nprint(f"{ord(\'a\')} * {ord(\'b\')} * {ord(\'c\')} = {ord(\'a\') * ord(\'b\') * ord(\'c\')}")'}, 'name': 'run_cell', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 744, 'output_tokens': 323, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': 'Testing checksum function:\n'
                            'checksum("abc") = 941094\n'
                            'checksum("hello") = 13599570816\n'
                            'checksum("A") = 65\n'
                            'checksum("") = 1\n'
                            '\n'
                            "Manual verification for 'abc':\n"
                            "ord('a') = 97, ord('b') = 98, ord('c') = 99\n"
                            '97 * 98 * 99 = 941094\n',
                 'tool_use_id': 'toolu_017Ku7oUiMbVKLehAwJkxohD',
                 'type': 'tool_result'}],
  'role': 'user'}

Perfect! Here’s the 1-line checksum function:

checksum = lambda s: functools.reduce(operator.mul, (ord(c) for c in s), 1)

This function: - Uses functools.reduce with operator.mul to multiply values together - Converts each character to its ASCII value using ord(c) - Uses a generator expression (ord(c) for c in s) to get ASCII values - Starts with an initial value of 1 (the identity for multiplication) - Returns 1 for empty strings (since there are no characters to multiply)

The function works correctly as demonstrated by the test cases!

  • id: msg_01K1qjBgBmD9obDKWapXW3GY
  • content: [{'citations': None, 'text': "Perfect! Here's the 1-line checksum function:\n\n```python\nchecksum = lambda s: functools.reduce(operator.mul, (ord(c) for c in s), 1)\n```\n\nThis function:\n- Usesfunctools.reducewithoperator.multo multiply values together\n- Converts each character to its ASCII value usingord(c)\n- Uses a generator expression(ord(c) for c in s)to get ASCII values\n- Starts with an initial value of1(the identity for multiplication)\n- Returns1for empty strings (since there are no characters to multiply)\n\nThe function works correctly as demonstrated by the test cases!", 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1169, 'output_tokens': 159, 'server_tool_use': None, 'service_tier': 'standard'}

By asking for a calculation to be done on the username, we force it to use multiple steps:

# "it" refers to the `checksum` function defined in the previous toolloop call --
# this works only because the IPython session is persistent across calls.
pr = 'Use it to get the checksum of the username of this session.'
for o in chat.toolloop(pr): display(o)
Looking up username

I’ll first get the username of this session, then use the checksum function to calculate its checksum.

  • id: msg_012C5ZwA6Pdhyt9vkR9Si4uQ
  • content: [{'citations': None, 'text': "I'll first get the username of this session, then use the checksum function to calculate its checksum.", 'type': 'text'}, {'id': 'toolu_01EutWRKbMGv5jYZiRLbnXo2', 'input': {}, 'name': 'get_user', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1345, 'output_tokens': 59, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': 'Jeremy',
                 'tool_use_id': 'toolu_01EutWRKbMGv5jYZiRLbnXo2',
                 'type': 'tool_result'}],
  'role': 'user'}
Press Enter to execute, or enter "n" to skip?
```
# Get the checksum of the username
username = "Jeremy"
result = checksum(username)

print(f"Username: {username}")
print(f"Checksum: {result}")

# Let's also show the ASCII values for verification
print(f"\nASCII values:")
for char in username:
    print(f"'{char}': {ord(char)}")

# Manual calculation
ascii_values = [ord(c) for c in username]
manual_result = 1
for val in ascii_values:
    manual_result *= val

print(f"\nManual verification: {' × '.join(map(str, ascii_values))} = {manual_result}")
```

Now I’ll calculate the checksum of the username “Jeremy”:

  • id: msg_01PsVZip6oKkWApgvobSG7Rg
  • content: [{'citations': None, 'text': 'Now I\'ll calculate the checksum of the username "Jeremy":', 'type': 'text'}, {'id': 'toolu_01BPropj92VoNyk1G6WrEG7e', 'input': {'code': '# Get the checksum of the username\nusername = "Jeremy"\nresult = checksum(username)\n\nprint(f"Username: {username}")\nprint(f"Checksum: {result}")\n\n# Let\'s also show the ASCII values for verification\nprint(f"\\nASCII values:")\nfor char in username:\n print(f"\'{char}\': {ord(char)}")\n\n# Manual calculation\nascii_values = [ord(c) for c in username]\nmanual_result = 1\nfor val in ascii_values:\n manual_result *= val\n\nprint(f"\\nManual verification: {\' × \'.join(map(str, ascii_values))} = {manual_result}")'}, 'name': 'run_cell', 'type': 'tool_use'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: tool_use
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1417, 'output_tokens': 226, 'server_tool_use': None, 'service_tier': 'standard'}
{ 'content': [ { 'content': 'Username: Jeremy\n'
                            'Checksum: 1134987783204\n'
                            '\n'
                            'ASCII values:\n'
                            "'J': 74\n"
                            "'e': 101\n"
                            "'r': 114\n"
                            "'e': 101\n"
                            "'m': 109\n"
                            "'y': 121\n"
                            '\n'
                            'Manual verification: 74 × 101 × 114 × 101 × 109 × '
                            '121 = 1134987783204\n',
                 'tool_use_id': 'toolu_01BPropj92VoNyk1G6WrEG7e',
                 'type': 'tool_result'}],
  'role': 'user'}

The checksum of the username “Jeremy” is 1,134,987,783,204.

This is calculated by multiplying the ASCII values: 74 × 101 × 114 × 101 × 109 × 121 = 1,134,987,783,204.

  • id: msg_01RG6kseW61euFzM5kAntrZd
  • content: [{'citations': None, 'text': 'The checksum of the username "Jeremy" is **1,134,987,783,204**.\n\nThis is calculated by multiplying the ASCII values: 74 × 101 × 114 × 101 × 109 × 121 = 1,134,987,783,204.', 'type': 'text'}]
  • model: claude-sonnet-4-20250514
  • role: assistant
  • stop_reason: end_turn
  • stop_sequence: None
  • type: message
  • usage: {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 1743, 'output_tokens': 70, 'server_tool_use': None, 'service_tier': 'standard'}