gtani committed on
Commit
324e5e6
·
verified ·
1 Parent(s): 69f182f

Delete bedrock_test.py

Browse files
Files changed (1) hide show
  1. bedrock_test.py +0 -215
bedrock_test.py DELETED
@@ -1,215 +0,0 @@
1
-
2
-
3
-
4
#%%
# Shared AWS configuration for every cell below.
#
# SECURITY FIX: a live IAM access-key pair was hardcoded here and committed to
# version control. Any key that reaches a repository must be treated as leaked
# and rotated immediately. Credentials are now read from the environment; the
# variable names are unchanged so the later cells keep working.
import os

import boto3
import json
import logging

aws_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID", "")
aws_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
aws_region = "eu-north-1"
11
#%%
# Smoke test: call Claude v2.1 on Bedrock through the Anthropic SDK.
from anthropic import AnthropicBedrock

client = AnthropicBedrock(
    # Authenticate by either providing the keys below or use the default AWS
    # credential providers, such as ~/.aws/credentials or the
    # "AWS_SECRET_ACCESS_KEY" and "AWS_ACCESS_KEY_ID" environment variables.
    aws_access_key=aws_access_key_id,
    aws_secret_key=aws_secret_access_key,
    # Temporary credentials can be used with aws_session_token.
    # Read more at https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html.
    aws_session_token=None,
    # aws_region changes the aws region to which the request is made. By default,
    # we read AWS_REGION, and if that's not present, we default to us-east-1.
    # Note that we do not read ~/.aws/config for the region.
    aws_region=aws_region,
)

# NOTE(review): the original also assigned
# model_id='eu.anthropic.claude-3-7-sonnet-20250219-v1:0' here, but that value
# was never read before being reassigned in the next cell — removed as dead code.
modelid = 'anthropic.claude-v2:1'

message = client.messages.create(
    model=modelid,
    max_tokens=256,
    messages=[{"role": "user", "content": "Hello, world"}],
)
print(message.content)
36
#%%
from botocore.exceptions import ClientError

# Explicit boto3 session using the key pair defined at the top of the file.
session = boto3.Session(
    aws_access_key_id=aws_access_key_id,
    aws_secret_access_key=aws_secret_access_key,
    region_name=aws_region
)

bedrock_runtime = session.client("bedrock-runtime")

model_id = "anthropic.claude-v2:1"

# Minimal Anthropic-on-Bedrock request body (Messages API schema).
payload = {
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 100,
    "messages": [{"role": "user", "content": "Hello!"}]
}

# Probe whether streaming is enabled for this model/role: success means
# streaming works; specific ClientError codes explain why it does not.
# The try block deliberately covers the consumption loop as well, so errors
# raised mid-stream are reported the same way as errors raised on the call.
try:
    response = bedrock_runtime.invoke_model_with_response_stream(
        modelId=model_id,
        body=json.dumps(payload),
        contentType="application/json",
        accept="application/json"
    )

    print("✅ Streaming appears to be enabled for Claude v2.1.")
    # Each stream event wraps a raw bytes chunk of the JSON event payload.
    for event in response['body']:
        chunk = event['chunk']['bytes']
        print(chunk.decode(), end="")

except ClientError as e:
    code = e.response['Error']['Code']
    if code == "AccessDeniedException":
        print("❌ Streaming is NOT enabled for Claude v2.1: Access denied.")
    elif code == "ValidationException":
        print("⚠️ Model does not support streaming or bad payload.")
    else:
        print(f"❌ Unexpected error: {e}")
except Exception as e:
    # Broad catch-all: this is a diagnostic script, so any failure is
    # reported rather than raised.
    print(f"❌ General error: {e}")
77
#%%
# Exercise both invocation paths (non-streaming and streaming) of the Bedrock
# runtime client created in the previous cell.
messages = [
    {"role": "user", "content": "Can you tell me a fun fact about llamas?"}
]

payload = {
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 256,
    "messages": messages
}

# ✅ 1. Test NON-streaming (invoke_model)
print("🧪 Testing invoke_model (non-streaming)...")
try:
    # BUG FIX: the original called `client.invoke_model(...)`, but `client`
    # is the AnthropicBedrock SDK object, which has no invoke_model method.
    # These boto3-style calls belong to `bedrock_runtime` (created above).
    response = bedrock_runtime.invoke_model(
        modelId=model_id,
        body=json.dumps(payload),
        contentType="application/json",
        accept="application/json"
    )
    result = json.loads(response["body"].read().decode("utf-8"))
    print("✅ invoke_model succeeded.")
    print("🧠 Claude's reply:", result["content"][0]["text"])
except ClientError as e:
    print("❌ invoke_model failed:", e)

# ❌ 2. Test Streaming (invoke_model_with_response_stream)
print("\n🧪 Testing invoke_model_with_response_stream (streaming)...")
try:
    stream_response = bedrock_runtime.invoke_model_with_response_stream(
        modelId=model_id,
        body=json.dumps(payload),
        contentType="application/json",
        accept="application/json"
    )

    print("✅ Streaming supported. Response:")
    for event in stream_response["body"]:
        chunk = event.get("chunk", {}).get("bytes", b"")
        if chunk:
            decoded = json.loads(chunk.decode("utf-8"))
            # BUG FIX: Anthropic content_block_delta events carry the text in
            # delta["text"] (a text_delta), not delta["content"]; the original
            # lookup always produced "" and printed nothing.
            delta = decoded.get("delta", {}).get("text", "")
            print(delta, end="", flush=True)

except ClientError as e:
    code = e.response["Error"]["Code"]
    if code == "AccessDeniedException":
        print("❌ AccessDeniedException: Streaming is not enabled for your role.")
    elif code == "ValidationException":
        print("⚠️ ValidationException: Model might not support streaming or payload is malformed.")
    else:
        print(f"❌ Unexpected error: {e}")
except Exception as e:
    print(f"❌ General error: {e}")
131
#%%
# SECURITY FIX: the same leaked AWS key pair was hardcoded here a second
# time. Keys committed to a repository must be rotated; read credentials
# from the environment instead of embedding them in source.
import os

AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
aws_region = "eu-west-1"
139
#%%
# SECURITY FIX: a third hardcoded copy of the leaked AWS key pair lived here;
# credentials are now read from the environment (rotate the exposed keys).
import os

AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
aws_region = "eu-west-1"

from langchain_aws import BedrockLLM

# NOTE(review): the original first assigned modelid='anthropic.claude-v2:1'
# and immediately overwrote it; only the Mistral id was ever used.
modelid = "mistral.mistral-large-2402-v1:0"
# model_id='eu.anthropic.claude-3-7-sonnet-20250219-v1:0'

custom_llm = BedrockLLM(
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=aws_region,
    # Which Bedrock "provider" you're talking to:
    # – use "anthropic" for Claude models
    # – use "cohere" for the Cohere models
    provider="mistral",
    model_id=modelid,
    model_kwargs={"temperature": 0.7},
    streaming=True,
)

print(custom_llm.invoke("What’s the recipe for mayonnaise?"))
166
#%%
from langchain_aws import ChatBedrockConverse
import os

# System prompt (German) for DevalBot, DEval's conversational assistant.
# Assembled from fragments only to keep source lines short; the resulting
# value is one long single-line string.
system_prompt = "".join((
    "Du bist DevalBot, ein konversationeller Assistent des Deutschen Evaluierungsinstituts ",
    "für Entwicklungsbewertung (DEval). DEval bietet staatlichen und zivilgesellschaftlichen ",
    "Organisationen in der Entwicklungszusammenarbeit unabhängige und wissenschaftlich fundierte ",
    "Evaluierungen. Deine Hauptsprache ist Deutsch; antworte daher standardmäßig auf Deutsch. ",
    "Du kannst zudem bei statistischen Analysen und Programmierung in Stata und R unterstützen.",
))
176
#%%
# Construct the streaming Bedrock chat model used by the rest of the app.
# Region resolution: AWS_DEFAULT_REGION if set, otherwise eu-west-1.
_region_name = os.environ.get("AWS_DEFAULT_REGION", "eu-west-1")

bedrock_llm = ChatBedrockConverse(
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=_region_name,
    model_id="mistral.mistral-large-2402-v1:0",  # or your preferred Bedrock model
    temperature=0.7,
)
184
- #%%
185
- from bedrock_client import bedrock_llm, system_prompt
186
- from langchain.schema import SystemMessage, HumanMessage, AIMessage
187
-
188
-
189
-
190
def build_messages(
    user_message: str,
    history: list[dict]) -> list:
    """Assemble the LangChain message list for one chat turn.

    The system prompt comes first, followed by the prior conversation mapped
    onto LangChain message types, and finally the new user message.

    Args:
        user_message: The incoming user turn to append last.
        history: Prior turns as dicts with "role" and "content" keys.

    Returns:
        A list of SystemMessage/HumanMessage/AIMessage objects.
    """
    # Dispatch table: chat role -> LangChain message class.
    role_to_cls = {"user": HumanMessage, "assistant": AIMessage}

    msgs: list = [SystemMessage(content=system_prompt)]
    for entry in history:
        cls = role_to_cls.get(entry["role"])
        if cls is None:
            # Unexpected roles are silently skipped (same as the original).
            continue
        msgs.append(cls(content=entry["content"]))

    msgs.append(HumanMessage(content=user_message))
    return msgs
212
-
213
-
214
# Smoke test: one user turn with an empty history.
build_messages('hi',[])
#%%