Skip to content

Commit b4ef8d5

Browse files
committed
Update anthropic test
1 parent 4ede00f commit b4ef8d5

4 files changed

Lines changed: 50 additions & 37 deletions

File tree

test-anthropic/main.py

Lines changed: 12 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -3,12 +3,14 @@
33
from anthropic import Anthropic
44

55
import sentry_sdk
6-
from sentry_sdk.integrations.anthropic import AnthropicIntegration
76
from sentry_sdk.consts import SPANTEMPLATE
7+
from sentry_sdk.integrations.anthropic import AnthropicIntegration
88

99

1010
@sentry_sdk.trace(name="Custom AI Agent", template=SPANTEMPLATE.AI_AGENT)
11-
def my_pipeline(client):
11+
def my_custom_agent(client):
12+
print("~~~ Starting my_custom_agent ~~~")
13+
1214
# Sync create message
1315
message = client.messages.create(
1416
messages=[
@@ -20,8 +22,8 @@ def my_pipeline(client):
2022
model="claude-3-5-haiku-latest",
2123
max_tokens=1024,
2224
)
23-
print("Message:")
24-
print(message.dict())
25+
print("~~~ First result (sync message): ~~~")
26+
print(message.model_dump())
2527

2628
# Sync create streaming message
2729
stream = client.messages.create(
@@ -35,9 +37,11 @@ def my_pipeline(client):
3537
max_tokens=1024,
3638
stream=True,
3739
)
38-
print("Message (Stream):")
40+
print("~~~ Second result (sync streaming message): ~~~")
3941
for event in stream:
40-
print(event.dict())
42+
print(event.model_dump())
43+
44+
print("~~~ Done ~~~")
4145

4246

4347
def main():
@@ -56,8 +60,8 @@ def main():
5660
api_key=os.environ.get("ANTHROPIC_API_KEY"),
5761
)
5862

59-
with sentry_sdk.start_transaction(name="anthropic-sync"):
60-
my_pipeline(client)
63+
# with sentry_sdk.start_transaction(name="anthropic-sync"):
64+
my_custom_agent(client)
6165

6266

6367
if __name__ == "__main__":

test-anthropic/main_async.py

Lines changed: 10 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -9,7 +9,9 @@
99

1010

1111
@sentry_sdk.trace(name="Custom AI Agent", template=SPANTEMPLATE.AI_AGENT)
12-
async def my_pipeline(client):
12+
async def my_custom_agent(client):
13+
print("~~~ Starting my_custom_agent ~~~")
14+
1315
# Async create message
1416
message = await client.messages.create(
1517
messages=[
@@ -21,7 +23,8 @@ async def my_pipeline(client):
2123
model="claude-3-haiku-20240307",
2224
max_tokens=1024,
2325
)
24-
print(message.dict())
26+
print("~~~ First result (async message): ~~~")
27+
print(message.model_dump())
2528

2629
# Async create streaming message
2730
stream = await client.messages.create(
@@ -35,8 +38,11 @@ async def my_pipeline(client):
3538
max_tokens=1024,
3639
stream=True,
3740
)
41+
print("~~~ Second result (async streaming message): ~~~")
3842
async for event in stream:
39-
print(event.dict())
43+
print(event.model_dump())
44+
45+
print("~~~ Done ~~~")
4046

4147

4248
async def main():
@@ -56,7 +62,7 @@ async def main():
5662
)
5763

5864
with sentry_sdk.start_transaction(name="anthropic-async"):
59-
await my_pipeline(client)
65+
await my_custom_agent(client)
6066

6167

6268
asyncio.run(main())

test-anthropic/main_tool.py

Lines changed: 27 additions & 24 deletions
Original file line number | Diff line number | Diff line change
@@ -3,35 +3,35 @@
33
from anthropic import Anthropic
44

55
import sentry_sdk
6-
from sentry_sdk.integrations.anthropic import AnthropicIntegration
76
from sentry_sdk.consts import SPANTEMPLATE
7+
from sentry_sdk.integrations.anthropic import AnthropicIntegration
88

99

10+
@sentry_sdk.trace(name="get_weather", template=SPANTEMPLATE.AI_TOOL)
1011
def get_weather(location):
11-
return f"It is sunny with a high of 23°C."
12+
return "It is sunny with a high of 23°C."
1213

1314

14-
# Define the tool
15-
tools = [
16-
{
17-
"name": "get_weather",
18-
"description": "Get the current weather in a given location",
19-
"input_schema": {
20-
"type": "object",
21-
"properties": {
15+
tools = [{
16+
"name": "get_weather",
17+
"description": "Get the current weather in a given location",
18+
"input_schema": {
19+
"type": "object",
20+
"properties": {
2221
"location": {
23-
"type": "string",
24-
"description": "The city and state, e.g. San Francisco, CA"
22+
"type": "string",
23+
"description": "The city and state, e.g. San Francisco, CA"
2524
}
26-
},
27-
"required": ["location"]
28-
}
29-
}
30-
]
25+
},
26+
"required": ["location"]
27+
}
28+
}]
3129

3230

3331
@sentry_sdk.trace(name="Custom AI Agent", template=SPANTEMPLATE.AI_AGENT)
34-
def my_pipeline(client):
32+
def my_custom_agent(client):
33+
print("~~~ Starting my_custom_agent ~~~")
34+
3535
# Sync create message with tools
3636
message = client.messages.create(
3737
messages=[
@@ -45,19 +45,21 @@ def my_pipeline(client):
4545
max_tokens=1024,
4646
temperature=0,
4747
)
48-
print("Message:")
49-
print(message.dict())
48+
print("~~~ First result: ~~~")
49+
print(message.model_dump())
50+
5051
# If model wants to run a tool, run it.
5152
if message.stop_reason == "tool_use":
53+
print("~~~ Tool use detected ~~~")
5254
tool_use_block = message.content[1]
5355
function_name = tool_use_block.name
5456
tool_args = tool_use_block.input
5557

5658
if function_name in globals() and callable(globals()[function_name]):
5759
tool_result = globals()[function_name](**tool_args)
58-
print(f"Tool result: {tool_result}")
60+
print(f"~~~ Tool result: {tool_result}")
5961

60-
# Continue the conversation with the tool result
62+
# Continue the conversation including the tool result
6163
response = client.messages.create(
6264
messages=[
6365
{
@@ -85,8 +87,9 @@ def my_pipeline(client):
8587
temperature=0,
8688
)
8789

88-
print("Final response:")
90+
print("~~~ Final result: ~~~")
8991
print(response.content[0].text)
92+
print("~~~ Done ~~~")
9093

9194

9295
def main():
@@ -106,7 +109,7 @@ def main():
106109
)
107110

108111
with sentry_sdk.start_transaction(name="anthropic-sync-tool"):
109-
my_pipeline(client)
112+
my_custom_agent(client)
110113

111114

112115
if __name__ == "__main__":

test-anthropic/uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)