1+ from anthropic import Anthropic , AsyncAnthropic
2+ from flask import Flask , request # $ Source
3+
# Flask application and Anthropic SDK clients shared by the route below.
# NOTE(review): the clients take no explicit credentials, so they rely on
# the SDK's default configuration (environment) — confirm at deploy time.
app = Flask(__name__)
client = Anthropic()              # synchronous client (sinks 1, 2, 4)
async_client = AsyncAnthropic()   # asynchronous client (sink 3)
7+
8+
@app.route("/anthropic")
async def get_input_anthropic():
    """Inline-expectation test fixture for the ``py/prompt-injection`` query.

    Deliberately flows two untrusted query-string parameters (``persona``
    and ``query``) into four Anthropic message-creation APIs.  The
    ``# $ Alert[py/prompt-injection]`` comments appear to be CodeQL
    inline test expectations marking the exact lines the analysis must
    flag — do NOT alter those comments or "fix" the tainted flows; the
    injection is the point of the test.
    """
    # Untrusted input: both values come straight from the request query
    # string (the ``# $ Source`` annotation sits on the flask import).
    persona = request.args.get("persona")
    query = request.args.get("query")

    # Sink 1: synchronous Messages API.
    response1 = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=256,
        system="Talk like " + persona,  # $ Alert[py/prompt-injection]
        messages=[
            {
                "role": "user",
                "content": query,  # $ Alert[py/prompt-injection]
            }
        ],
    )

    # Sink 2: streaming variant of the Messages API.
    response2 = client.messages.stream(
        model="claude-sonnet-4-20250514",
        max_tokens=256,
        system="Talk like " + persona,  # $ Alert[py/prompt-injection]
        messages=[
            {
                "role": "user",
                "content": query,  # $ Alert[py/prompt-injection]
            }
        ],
    )

    # Sink 3: asynchronous client, same Messages API.
    response3 = await async_client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=256,
        system="Talk like " + persona,  # $ Alert[py/prompt-injection]
        messages=[
            {
                "role": "user",
                "content": query,  # $ Alert[py/prompt-injection]
            }
        ],
    )

    # Sink 4: beta Messages API (exercises the ``client.beta`` path,
    # with a prompt-caching beta flag).
    response4 = client.beta.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=256,
        system="Talk like " + persona,  # $ Alert[py/prompt-injection]
        messages=[
            {
                "role": "user",
                "content": query,  # $ Alert[py/prompt-injection]
            }
        ],
        betas=["prompt-caching-2024-07-31"],
    )

    # Keeps all four responses live so no call is dead code to the analysis.
    print(response1, response2, response3, response4)