The async version
Setup
model = models[1]
Async SDK
cli = AsyncAnthropic()
m = {'role': 'user', 'content': "I'm Jeremy"}
r = await cli.messages.create(messages=[m], model=model, max_tokens=100)
r
Hi Jeremy! Nice to meet you. I’m Claude, an AI assistant created by Anthropic. How can I help you today?
- id:
msg_01UBPA1yCoPZdk4vDbCwdXGm
- content:
[{'text': "Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?", 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 10, 'output_tokens': 31}
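The SDK can stream the response too. Here's a minimal sketch using the same `cli`, `model`, and `m` as above; this is the plain SDK streaming interface that `AsyncClient._stream` wraps later in this section:
async with cli.messages.stream(model=model, messages=[m], max_tokens=100) as s:
    async for text in s.text_stream: print(text, end='')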
AsyncClient
AsyncClient (model, cli=None, log=False)
Async Anthropic messages client.
Exported source
class AsyncClient(Client):
    def __init__(self, model, cli=None, log=False):
        "Async Anthropic messages client."
        super().__init__(model,cli,log)
        if not cli: self.c = AsyncAnthropic(default_headers={'anthropic-beta': 'prompt-caching-2024-07-31'})
c = AsyncClient(model)
Registering the earlier SDK response with `_r` folds its token counts into the client's running `use` total:
c._r(r)
c.use
In: 10; Out: 31; Cache create: 0; Cache read: 0; Total: 41
AsyncClient.__call__
AsyncClient.__call__ (msgs:list, sp='', temp=0, maxtok=4096, prefill='', stream:bool=False, stop=None, tools:Optional[list]=None, tool_choice:Optional[dict]=None, cli=None, log=False)
Make an async call to Claude.
| | Type | Default | Details |
|---|---|---|---|
| msgs | list | | List of messages in the dialog |
| sp | str | | The system prompt |
| temp | int | 0 | Temperature |
| maxtok | int | 4096 | Maximum tokens |
| prefill | str | | Optional prefill to pass to Claude as start of its response |
| stream | bool | False | Stream response? |
| stop | NoneType | None | Stop sequence |
| tools | Optional | None | List of tools to make available to Claude |
| tool_choice | Optional | None | Optionally force use of some tool |
| cli | NoneType | None | |
| log | bool | False | |
Exported source
@patch
async def _stream(self:AsyncClient, msgs:list, prefill='', **kwargs):
    async with self.c.messages.stream(model=self.model, messages=mk_msgs(msgs), **kwargs) as s:
        if prefill: yield prefill
        async for o in s.text_stream: yield o
        self._log(await s.get_final_message(), prefill, msgs, kwargs)
Exported source
@patch
@delegates(Client)
async def __call__(self:AsyncClient,
list, # List of messages in the dialog
msgs:='', # The system prompt
sp=0, # Temperature
temp=4096, # Maximum tokens
maxtok='', # Optional prefill to pass to Claude as start of its response
prefillbool=False, # Stream response?
stream:=None, # Stop sequence
stoplist]=None, # List of tools to make available to Claude
tools:Optional[dict]=None, # Optionally force use of some tool
tool_choice:Optional[**kwargs):
"Make an async call to Claude."
if tools: kwargs['tools'] = [get_schema(o) for o in listify(tools)]
if tool_choice: kwargs['tool_choice'] = mk_tool_choice(tool_choice)
= self._precall(msgs, prefill, stop, kwargs)
msgs if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)
= await self.c.messages.create(
res =self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)
modelreturn self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)
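The `stop` parameter is handled by `_precall`, which turns it into the API's stop sequences. An illustrative call (output not shown) that halts generation at the first '5':
await c('Count from 1 to 10', stop='5')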
c = AsyncClient(model, log=True)
c.use
In: 0; Out: 0; Cache create: 0; Cache read: 0; Total: 0
c.model = models[1]
await c('Hi')
Hello! How can I help you today?
- id:
msg_01TC5wq1bS1ZcJMopq8bZ4o2
- content:
[{'text': 'Hello! How can I help you today?', 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 8, 'output_tokens': 12, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
c.use
In: 8; Out: 12; Cache create: 0; Cache read: 0; Total: 20
= "Concisely, what is the meaning of life?"
q = 'According to Douglas Adams,'
pref await c(q, prefill=pref)
According to Douglas Adams, it’s 42. More seriously, there’s no universal answer - it’s deeply personal. Common perspectives include: finding happiness, creating meaning through relationships and achievements, pursuing knowledge, helping others, or following spiritual/religious beliefs.
- id:
msg_012ZvrAoNyBherFi2q4fDRWq
- content:
[{'text': "According to Douglas Adams, it's 42. More seriously, there's no universal answer - it's deeply personal. Common perspectives include: finding happiness, creating meaning through relationships and achievements, pursuing knowledge, helping others, or following spiritual/religious beliefs.", 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 24, 'output_tokens': 50, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
async for o in (await c('Hi', stream=True)): print(o, end='')
Hello! How can I help you today?
c.use
In: 40; Out: 74; Cache create: 0; Cache read: 0; Total: 114
async for o in (await c(q, prefill=pref, stream=True)): print(o, end='')
According to Douglas Adams, it's 42. More seriously, there's no universal answer - it's deeply personal. Common perspectives include: finding happiness, creating meaning through relationships and achievements, pursuing knowledge, helping others, or following spiritual/religious beliefs.
c.use
In: 64; Out: 124; Cache create: 0; Cache read: 0; Total: 188
def sums(
    a:int,  # First thing to sum
    b:int=1 # Second thing to sum
) -> int: # The sum of the inputs
    "Adds a + b."
    print(f"Finding the sum of {a} and {b}")
    return a + b
a,b = 604542,6458932
pr = f"What is {a}+{b}?"
sp = "You are a summing expert."
tools=[get_schema(sums)]
choice = mk_tool_choice('sums')
msgs = mk_msgs(pr)
r = await c(msgs, sp=sp, tools=sums, tool_choice=sums)
tr = mk_toolres(r, ns=globals())
msgs += tr
contents(await c(msgs, sp=sp, tools=sums))
Finding the sum of 604542 and 6458932
'The sum of 604542 and 6458932 is 7063474.'
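The cell above is one request/tool-result round-trip: the first call returns a tool-use response, mk_toolres executes the tool and packages its output, and the second call produces the final text. When Claude may chain several tool calls, that round-trip can be folded into a loop; a minimal sketch, where call_with_tools is a hypothetical helper (not part of claudette) built from the same pieces:
async def call_with_tools(c, msgs, sp='', tools=None, max_steps=5):
    "Call Claude repeatedly until it stops requesting tools (hypothetical helper)."
    for _ in range(max_steps):
        r = await c(msgs, sp=sp, tools=tools)
        if r.stop_reason != 'tool_use': return r
        msgs += mk_toolres(r, ns=globals()) # run the tool(s) and append the results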
# tools = [get_schema(sums)]
# msgs = mk_msgs(pr)
# r = await c(msgs, sp=sp, tools=tools, tool_choice=choice)
# tr = mk_toolres(r, ns=globals())
# msgs += tr
# contents(await c(msgs, sp=sp, tools=tools))
Structured Output
AsyncClient.structured
AsyncClient.structured (msgs:list, tools:Optional[list]=None, obj:Optional=None, ns:Optional[collections.abc.Mapping]=None, sp='', temp=0, maxtok=4096, prefill='', stream:bool=False, stop=None, tool_choice:Optional[dict]=None, metadata:MetadataParam|NotGiven=NOT_GIVEN, stop_sequences:List[str]|NotGiven=NOT_GIVEN, system:Union[str,Iterable[TextBlockParam]]|NotGiven=NOT_GIVEN, temperature:float|NotGiven=NOT_GIVEN, top_k:int|NotGiven=NOT_GIVEN, top_p:float|NotGiven=NOT_GIVEN, extra_headers:Headers|None=None, extra_query:Query|None=None, extra_body:Body|None=None, timeout:float|httpx.Timeout|None|NotGiven=NOT_GIVEN)
Return the value of all tool calls (generally used for structured outputs)
| | Type | Default | Details |
|---|---|---|---|
| msgs | list | | List of messages in the dialog |
| tools | Optional | None | List of tools to make available to Claude |
| obj | Optional | None | Class to search for tools |
| ns | Optional | None | Namespace to search for tools |
| sp | str | | The system prompt |
| temp | int | 0 | Temperature |
| maxtok | int | 4096 | Maximum tokens |
| prefill | str | | Optional prefill to pass to Claude as start of its response |
| stream | bool | False | Stream response? |
| stop | NoneType | None | Stop sequence |
| tool_choice | Optional | None | Optionally force use of some tool |
| metadata | MetadataParam \| NotGiven | NOT_GIVEN | |
| stop_sequences | List[str] \| NotGiven | NOT_GIVEN | |
| system | Union[str, Iterable[TextBlockParam]] \| NotGiven | NOT_GIVEN | |
| temperature | float \| NotGiven | NOT_GIVEN | |
| top_k | int \| NotGiven | NOT_GIVEN | |
| top_p | float \| NotGiven | NOT_GIVEN | |
| extra_headers | Headers \| None | None | |
| extra_query | Query \| None | None | |
| extra_body | Body \| None | None | |
| timeout | float \| httpx.Timeout \| None \| NotGiven | NOT_GIVEN | |
await c.structured(pr, sums)
Finding the sum of 604542 and 6458932
[7063474]
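Since any docments-annotated function can serve as the schema, structured doubles as a simple extraction API. A hypothetical sketch (person is an illustrative function, not part of claudette); as with sums, the result is a list of each tool call's return value:
def person(
    name:str, # The person's name
    age:int # The person's age
)->dict: # The extracted fields
    "Extract the name and age of the person mentioned."
    return dict(name=name, age=age)

await c.structured("Jeremy is 47 years old.", person)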
AsyncChat
AsyncChat
AsyncChat (model:Optional[str]=None, cli:Optional[claudette.core.Client]=None, sp='', tools:Optional[list]=None, temp=0, cont_pr:Optional[str]=None)
Anthropic async chat client.
| | Type | Default | Details |
|---|---|---|---|
| model | Optional | None | Model to use (leave empty if passing `cli`) |
| cli | Optional | None | Client to use (leave empty if passing `model`) |
| sp | str | | |
| tools | Optional | None | |
| temp | int | 0 | |
| cont_pr | Optional | None | |
Exported source
@delegates()
class AsyncChat(Chat):
    def __init__(self,
                 model:Optional[str]=None, # Model to use (leave empty if passing `cli`)
                 cli:Optional[Client]=None, # Client to use (leave empty if passing `model`)
                 **kwargs):
        "Anthropic async chat client."
        super().__init__(model, cli, **kwargs)
        if not cli: self.c = AsyncClient(model)
= "Never mention what tools you use."
sp = AsyncChat(model, sp=sp)
chat chat.c.use, chat.h
(In: 0; Out: 0; Cache create: 0; Cache read: 0; Total: 0, [])
AsyncChat.__call__
AsyncChat.__call__ (pr=None, temp=0, maxtok=4096, stream=False, prefill='', **kw)
Call self as a function.
| | Type | Default | Details |
|---|---|---|---|
| pr | NoneType | None | Prompt / message |
| temp | int | 0 | Temperature |
| maxtok | int | 4096 | Maximum tokens |
| stream | bool | False | Stream response? |
| prefill | str | | Optional prefill to pass to Claude as start of its response |
| kw | | | |
Exported source
@patch
async def _stream(self:AsyncChat, res):
    async for o in res: yield o
    self.h += mk_toolres(self.c.result, ns=self.tools, obj=self)
Exported source
@patch
async def _append_pr(self:AsyncChat, pr=None):
    prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user' if no history
    if pr and prev_role == 'user': await self() # get the assistant's reply to the pending user message first
    self._post_pr(pr, prev_role)
Exported source
@patch
async def __call__(self:AsyncChat,
        pr=None, # Prompt / message
        temp=0, # Temperature
        maxtok=4096, # Maximum tokens
        stream=False, # Stream response?
        prefill='', # Optional prefill to pass to Claude as start of its response
        **kw):
    await self._append_pr(pr)
    res = await self.c(self.h, stream=stream, prefill=prefill, sp=self.sp, temp=temp, maxtok=maxtok, **kw)
    if stream: return self._stream(res)
    self.h += mk_toolres(self.c.result, ns=mk_ns(*listify(self.tools)), obj=self)
    return res
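Because every call is awaitable, the main payoff over the sync Chat is concurrency: independent conversations can run in parallel with standard asyncio tools. A minimal sketch (ask_all is a hypothetical helper; in a notebook you can await it directly):
import asyncio

async def ask_all(prompts):
    "Run one AsyncChat per prompt concurrently (hypothetical helper)."
    chats = [AsyncChat(model) for _ in prompts]
    return await asyncio.gather(*(ch(p) for ch,p in zip(chats, prompts)))

# e.g. results = await ask_all(["Hi!", "What's 2+2?"])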
await chat("I'm Jeremy")
await chat("What's my name?")
Your name is Jeremy.
- id:
msg_01BNxuSzZGanZupYuJxFDTgi
- content:
[{'text': 'Your name is Jeremy.', 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 41, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
= "Concisely, what is the meaning of life?"
q = 'According to Douglas Adams,'
pref await chat(q, prefill=pref)
According to Douglas Adams, 42. But more seriously: to find purpose, create meaning, love, grow, and make a positive impact during our time here.
- id:
msg_012WB8YcoMyPk2Uuh69eUxnF
- content:
[{'text': 'According to Douglas Adams, 42. But more seriously: to find purpose, create meaning, love, grow, and make a positive impact during our time here.', 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 69, 'output_tokens': 31, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
chat = AsyncChat(model, sp=sp)
async for o in (await chat("I'm Jeremy", stream=True)): print(o, end='')
Hello Jeremy! Nice to meet you. How are you today?
= f"What is {a}+{b}?"
pr = AsyncChat(model, sp=sp, tools=[sums])
chat = await chat(pr)
r r
7,063,474
- id:
msg_013c9kT2obX52nLMf9PmWWHH
- content:
[{'text': '7,063,474', 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 24, 'output_tokens': 9, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
+= "Say the answer in a sentence."
pr = AsyncChat(model, sp=sp, tools=[sums])
chat = await chat(pr)
r r
The sum of 604,542 and 6,458,932 is 7,063,474.
- id:
msg_01F6Jms2nhe8VEDN16ZUGomv
- content:
[{'text': 'The sum of 604,542 and 6,458,932 is 7,063,474.', 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 31, 'output_tokens': 27, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
fn = Path('samples/puppy.jpg')
img = fn.read_bytes()
q = "In brief, what color flowers are in this image?"
msg = mk_msg([img, q])
await c([msg])
In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.
- id:
msg_01MLGBSn2QYJoKnthg7W2Tkj
- content:
[{'text': 'In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.', 'type': 'text'}]
- model:
claude-3-5-sonnet-20241022
- role:
assistant
- stop_reason:
end_turn
- stop_sequence:
None
- type:
message
- usage:
{'input_tokens': 110, 'output_tokens': 37, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}
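Images work with AsyncChat too, since each prompt is passed through mk_msg. A short sketch reusing img and q from above (the follow-up question is illustrative):
chat = AsyncChat(model)
await chat([img, q])
await chat("What color is the puppy?") # illustrative follow-up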