Siddharth-74 committed on
Commit
31982a4
·
verified ·
1 Parent(s): aba9352

Upload folder using huggingface_hub

Browse files
.DS_Store ADDED
Binary file (6.15 kB). View file
 
agent.py ADDED
@@ -0,0 +1,313 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import boto3
2
+ import json
3
+ import time
4
+ import zipfile
5
+ from io import BytesIO
6
+
7
+ iam_client = boto3.client('iam')
8
+ sts_client = boto3.client('sts')
9
+ session = boto3.session.Session()
10
+ region = session.region_name
11
+ account_id = sts_client.get_caller_identity()["Account"]
12
+ dynamodb_client = boto3.client('dynamodb')
13
+ dynamodb_resource = boto3.resource('dynamodb')
14
+ lambda_client = boto3.client('lambda')
15
+ bedrock_agent_client = boto3.client('bedrock-agent')
16
+
17
+
18
def create_dynamodb(table_name):
    """
    Create an on-demand DynamoDB table keyed by ``booking_id`` (string hash
    key) and block until AWS reports it exists.

    Args:
        table_name (str): name of the table to create
    """
    new_table = dynamodb_resource.create_table(
        TableName=table_name,
        KeySchema=[{'AttributeName': 'booking_id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'booking_id', 'AttributeType': 'S'}],
        BillingMode='PAY_PER_REQUEST'  # Use on-demand capacity mode
    )

    # Table creation is asynchronous; wait until it becomes ACTIVE.
    print(f'Creating table {table_name}...')
    new_table.wait_until_exists()
    print(f'Table {table_name} created successfully!')
    return
41
+
42
+
43
def create_lambda(lambda_function_name, lambda_iam_role):
    """
    Package lambda_function.py (read from the current working directory)
    into an in-memory zip archive and create a Lambda function from it.

    Args:
        lambda_function_name (str): name for the new Lambda function
        lambda_iam_role (dict): create_role/get_role response whose
            ['Role']['Arn'] becomes the function's execution role

    Returns:
        dict: the lambda create_function response

    Raises:
        FileNotFoundError: if lambda_function.py is not in the working dir
    """
    # Package up the lambda function code. The context manager guarantees
    # the archive is finalized (central directory written) before the buffer
    # is read, and that it is closed even if write() raises - the original
    # left the ZipFile open on error.
    buffer = BytesIO()
    with zipfile.ZipFile(buffer, 'w') as archive:
        archive.write("lambda_function.py")
    zip_content = buffer.getvalue()

    # Create Lambda Function
    lambda_function = lambda_client.create_function(
        FunctionName=lambda_function_name,
        Runtime='python3.12',
        Timeout=60,
        Role=lambda_iam_role['Role']['Arn'],
        Code={'ZipFile': zip_content},
        Handler='lambda_function.lambda_handler'
    )
    return lambda_function
63
+
64
+
65
def create_lambda_role(agent_name, dynamodb_table_name):
    """
    Create (or retrieve) the IAM role for the agent's Lambda function and
    attach to it the AWS-managed basic-execution policy plus a new
    customer-managed policy granting item-level access to the given
    DynamoDB table.

    Args:
        agent_name (str): used to derive the role and policy names
        dynamodb_table_name (str): table the Lambda may Get/Put/Delete items in

    Returns:
        dict: the create_role/get_role response for the Lambda role
    """
    lambda_function_role = f'{agent_name}-lambda-role'
    dynamodb_access_policy_name = f'{agent_name}-dynamodb-policy'
    # Create IAM Role for the Lambda function
    try:
        # Trust policy: only the Lambda service may assume this role.
        assume_role_policy_document = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Principal": {
                        "Service": "lambda.amazonaws.com"
                    },
                    "Action": "sts:AssumeRole"
                }
            ]
        }

        assume_role_policy_document_json = json.dumps(assume_role_policy_document)

        lambda_iam_role = iam_client.create_role(
            RoleName=lambda_function_role,
            AssumeRolePolicyDocument=assume_role_policy_document_json
        )

        # Pause to make sure role is created - IAM is eventually consistent,
        # a fixed sleep is a simplification used throughout this file.
        time.sleep(10)
    except iam_client.exceptions.EntityAlreadyExistsException:
        # Role exists from a previous run - reuse it.
        lambda_iam_role = iam_client.get_role(RoleName=lambda_function_role)

    # Attach the AWSLambdaBasicExecutionRole policy (CloudWatch Logs access)
    iam_client.attach_role_policy(
        RoleName=lambda_function_role,
        PolicyArn='arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole'
    )

    # Create a policy to grant access to the DynamoDB table
    dynamodb_access_policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": [
                    "dynamodb:GetItem",
                    "dynamodb:PutItem",
                    "dynamodb:DeleteItem"
                ],
                "Resource": "arn:aws:dynamodb:{}:{}:table/{}".format(
                    region, account_id, dynamodb_table_name
                )
            }
        ]
    }

    # Create the policy
    # NOTE(review): unlike the role above, create_policy is NOT guarded
    # against EntityAlreadyExistsException - a second run with the same
    # agent_name will raise here. Confirm whether re-runs are expected.
    dynamodb_access_policy_json = json.dumps(dynamodb_access_policy)
    dynamodb_access_policy_response = iam_client.create_policy(
        PolicyName=dynamodb_access_policy_name,
        PolicyDocument=dynamodb_access_policy_json
    )

    # Attach the policy to the Lambda function's role
    iam_client.attach_role_policy(
        RoleName=lambda_function_role,
        PolicyArn=dynamodb_access_policy_response['Policy']['Arn']
    )
    return lambda_iam_role
132
+
133
+
134
def create_agent_role_and_policies(agent_name, agent_foundation_model, kb_id=None):
    """
    Create the Bedrock agent execution role and its model-access policy.

    The policy always allows bedrock:InvokeModel on the given foundation
    model; when kb_id is provided it additionally allows Retrieve /
    RetrieveAndGenerate on that knowledge base.

    Args:
        agent_name (str): used to derive the role and policy names
        agent_foundation_model (str): foundation model id the agent invokes
        kb_id (str, optional): knowledge base id attached to the agent

    Returns:
        dict: the create_role response for the agent role
    """
    agent_bedrock_allow_policy_name = f"{agent_name}-ba"
    agent_role_name = f'AmazonBedrockExecutionRoleForAgents_{agent_name}'
    # Create IAM policies for agent
    statements = [
        {
            "Sid": "AmazonBedrockAgentBedrockFoundationModelPolicy",
            "Effect": "Allow",
            "Action": "bedrock:InvokeModel",
            "Resource": [
                f"arn:aws:bedrock:{region}::foundation-model/{agent_foundation_model}"
            ]
        }
    ]
    # add Knowledge Base retrieve and retrieve and generate permissions if agent has KB attached to it
    if kb_id:
        statements.append(
            {
                "Sid": "QueryKB",
                "Effect": "Allow",
                "Action": [
                    "bedrock:Retrieve",
                    "bedrock:RetrieveAndGenerate"
                ],
                "Resource": [
                    f"arn:aws:bedrock:{region}:{account_id}:knowledge-base/{kb_id}"
                ]
            }
        )

    bedrock_agent_bedrock_allow_policy_statement = {
        "Version": "2012-10-17",
        "Statement": statements
    }

    bedrock_policy_json = json.dumps(bedrock_agent_bedrock_allow_policy_statement)

    # NOTE(review): create_policy/create_role here are not guarded against
    # EntityAlreadyExistsException (unlike create_lambda_role), so a re-run
    # with the same agent_name raises - confirm whether that is intended.
    agent_bedrock_policy = iam_client.create_policy(
        PolicyName=agent_bedrock_allow_policy_name,
        PolicyDocument=bedrock_policy_json
    )

    # Create IAM Role for the agent and attach IAM policies
    # Trust policy: only the Bedrock service may assume this role.
    assume_role_policy_document = {
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": {
                "Service": "bedrock.amazonaws.com"
            },
            "Action": "sts:AssumeRole"
        }]
    }

    assume_role_policy_document_json = json.dumps(assume_role_policy_document)
    agent_role = iam_client.create_role(
        RoleName=agent_role_name,
        AssumeRolePolicyDocument=assume_role_policy_document_json
    )

    # Pause to make sure role is created (IAM is eventually consistent)
    time.sleep(10)

    iam_client.attach_role_policy(
        RoleName=agent_role_name,
        PolicyArn=agent_bedrock_policy['Policy']['Arn']
    )
    return agent_role
202
+
203
+
204
def delete_agent_roles_and_policies(agent_name):
    """
    Best-effort teardown of the IAM roles and customer-managed policies that
    were created for an agent. Every step is attempted independently: a
    failure is printed and skipped so a partial cleanup still removes
    whatever it can.

    Args:
        agent_name (str): used to derive the role and policy names
    """
    agent_bedrock_allow_policy_name = f"{agent_name}-ba"
    agent_role_name = f'AmazonBedrockExecutionRoleForAgents_{agent_name}'
    dynamodb_access_policy_name = f'{agent_name}-dynamodb-policy'
    lambda_function_role = f'{agent_name}-lambda-role'

    # Detach each customer-managed policy from its role.
    for role_name, policy in (
        (agent_role_name, agent_bedrock_allow_policy_name),
        (lambda_function_role, dynamodb_access_policy_name),
    ):
        try:
            iam_client.detach_role_policy(
                RoleName=role_name,
                PolicyArn=f'arn:aws:iam::{account_id}:policy/{policy}'
            )
        except Exception as e:
            print(f"Could not detach {policy} from {role_name}")
            print(e)

    # Detach the AWS-managed basic-execution policy from the Lambda role.
    try:
        iam_client.detach_role_policy(
            RoleName=lambda_function_role,
            PolicyArn='arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole'
        )
    except Exception as e:
        print(f"Could not detach AWSLambdaBasicExecutionRole from {lambda_function_role}")
        print(e)

    # Roles can only be deleted once their policies are detached.
    for role_name in (agent_role_name, lambda_function_role):
        try:
            iam_client.delete_role(RoleName=role_name)
        except Exception as e:
            print(f"Could not delete role {role_name}")
            print(e)

    # Finally remove the customer-managed policies themselves.
    for policy in (agent_bedrock_allow_policy_name, dynamodb_access_policy_name):
        try:
            iam_client.delete_policy(
                PolicyArn=f'arn:aws:iam::{account_id}:policy/{policy}'
            )
        except Exception as e:
            print(f"Could not delete policy {policy}")
            print(e)
256
+
257
+
258
def clean_up_resources(
    table_name, lambda_function, lambda_function_name, agent_action_group_response, agent_functions,
    agent_id, kb_id, alias_id
):
    """
    Best-effort teardown of everything created for the agent demo: the agent
    (action group, alias, KB association), the Lambda function, and the
    DynamoDB table. Each stage catches and prints its own errors so later
    stages still run.

    Args:
        table_name (str): DynamoDB table to delete
        lambda_function (dict): create_function response (FunctionArn used)
        lambda_function_name (str): Lambda function to delete
        agent_action_group_response (dict): create_agent_action_group response
        agent_functions (list): function schema used by the action group
        agent_id (str): agent to delete
        kb_id (str): knowledge base to disassociate from the agent
        alias_id (str): agent alias to delete
    """
    action_group_id = agent_action_group_response['agentActionGroup']['actionGroupId']
    action_group_name = agent_action_group_response['agentActionGroup']['actionGroupName']
    # Delete Agent Action Group, Agent Alias, and Agent
    try:
        # An action group must be DISABLED before it can be deleted.
        bedrock_agent_client.update_agent_action_group(
            agentId=agent_id,
            agentVersion='DRAFT',
            actionGroupId= action_group_id,
            actionGroupName=action_group_name,
            actionGroupExecutor={
                'lambda': lambda_function['FunctionArn']
            },
            functionSchema={
                'functions': agent_functions
            },
            actionGroupState='DISABLED',
        )
        bedrock_agent_client.disassociate_agent_knowledge_base(
            agentId=agent_id,
            agentVersion='DRAFT',
            knowledgeBaseId=kb_id
        )
        bedrock_agent_client.delete_agent_action_group(
            agentId=agent_id,
            agentVersion='DRAFT',
            actionGroupId=action_group_id
        )
        bedrock_agent_client.delete_agent_alias(
            agentAliasId=alias_id,
            agentId=agent_id
        )
        bedrock_agent_client.delete_agent(agentId=agent_id)
        print(f"Agent {agent_id}, Agent Alias {alias_id}, and Action Group have been deleted.")
    except Exception as e:
        # One try block covers all agent steps: any failure skips the rest
        # of the agent teardown but not the Lambda/DynamoDB stages below.
        print(f"Error deleting Agent resources: {e}")

    # Delete Lambda function
    try:
        lambda_client.delete_function(FunctionName=lambda_function_name)
        print(f"Lambda function {lambda_function_name} has been deleted.")
    except Exception as e:
        print(f"Error deleting Lambda function {lambda_function_name}: {e}")

    # Delete DynamoDB table and wait for the deletion to complete.
    try:
        dynamodb_client.delete_table(TableName=table_name)
        print(f"Table {table_name} is being deleted...")
        waiter = dynamodb_client.get_waiter('table_not_exists')
        waiter.wait(TableName=table_name)
        print(f"Table {table_name} has been deleted.")
    except Exception as e:
        print(f"Error deleting table {table_name}: {e}")
aws_bedrock.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import typing as t
3
+
4
+ from ragas.messages import AIMessage, HumanMessage
5
+
6
+
7
def get_last_orchestration_value(traces: t.List[t.Dict[str, t.Any]], key: str):
    """
    Iterates through the traces to find the last occurrence of a specified key
    within the orchestrationTrace.

    Args:
        traces: list of trace dicts, each optionally containing
            ``trace.orchestrationTrace``.
        key: key to look for inside each orchestrationTrace mapping.

    Returns:
        (index, value): Tuple where index is the last index at which the key
        was found and value is the corresponding value, or (-1, None) if the
        key never occurs. (-1, not None: the original docstring claimed
        (None, None), but callers compare the index numerically, so a
        sentinel of -1 keeps ``idx_a > idx_b`` well-defined.)
    """
    last_index = -1
    last_value = None
    for i, trace in enumerate(traces):
        # Missing intermediate keys fall through to an empty dict.
        orchestration = trace.get("trace", {}).get("orchestrationTrace", {})
        if key in orchestration:
            last_index = i
            last_value = orchestration[key]
    return last_index, last_value
23
+
24
+
25
def extract_messages_from_model_invocation(model_inv):
    """
    Parse the JSON payload in the 'text' field of a modelInvocationInput and
    turn its chat messages into ragas HumanMessage/AIMessage objects, casting
    every content value to str. Messages with any other role are skipped.

    Returns:
        List of HumanMessage and AIMessage objects, excluding the final
        message of the payload.
    """
    payload = json.loads(model_inv.get("text", "{}"))
    builders = {"user": HumanMessage, "assistant": AIMessage}
    collected = []
    for entry in payload.get("messages", []):
        make = builders.get(entry.get("role"))
        if make is not None:
            collected.append(make(content=str(entry.get("content", ""))))
    # NOTE(review): the last message is deliberately dropped - presumably it
    # is superseded by the finalResponse appended by the caller
    # (convert_to_ragas_messages); confirm.
    return collected[:-1]
43
+
44
+
45
def convert_to_ragas_messages(traces: t.List):
    """
    Converts a list of trace dictionaries into ragas messages: the chat
    history taken from the last modelInvocationInput, plus the observation's
    finalResponse as a closing AIMessage - but only when that observation
    occurs later in the trace list than the model invocation.

    Returns:
        List of HumanMessage and AIMessage objects.
    """
    messages = []

    # Chat history from the most recent model invocation, if any.
    model_inv_index, model_inv = get_last_orchestration_value(
        traces, "modelInvocationInput"
    )
    if model_inv is not None:
        messages.extend(extract_messages_from_model_invocation(model_inv))

    # Closing response from the most recent observation, if it came after.
    obs_index, observation = get_last_orchestration_value(
        traces, "observation"
    )
    if observation is not None and obs_index > model_inv_index:
        final_text = str(observation.get("finalResponse", {}).get("text", ""))
        messages.append(AIMessage(content=final_text))

    return messages
72
+
73
+
74
def extract_kb_trace(traces):
    """
    Collect knowledge-base lookup groups from a trace stream.

    A group is opened by an orchestrationTrace invocationInput with
    invocationType == "KNOWLEDGE_BASE", filled in by a later
    knowledgeBaseLookupOutput observation (matched to the oldest group still
    waiting for one), and completed by a finalResponse observation. Multiple
    in-flight knowledge base invocations are supported.

    Returns:
        List of dicts, each with keys 'user_input', 'retrieved_contexts'
        and 'response'.
    """
    completed = []
    pending = []  # groups opened but not yet completed, oldest first

    for entry in traces:
        orchestration = entry.get("trace", {}).get("orchestrationTrace", {})

        # 1. A KB invocation opens a new group carrying the query text.
        invocation = orchestration.get("invocationInput")
        if invocation and invocation.get("invocationType") == "KNOWLEDGE_BASE":
            kb_query = invocation.get("knowledgeBaseLookupInput", {})
            pending.append({"user_input": kb_query.get("text")})

        observation = orchestration.get("observation", {})
        if not observation:
            continue

        # 2. A KB output fills the oldest group still missing its contexts.
        if "knowledgeBaseLookupOutput" in observation:
            for group in pending:
                if "user_input" in group and "retrieved_contexts" not in group:
                    references = observation["knowledgeBaseLookupOutput"].get(
                        "retrievedReferences", []
                    )
                    group["retrieved_contexts"] = [
                        ref.get("content", {}).get("text") for ref in references
                    ]
                    break

        # 3. A final response closes every group that already has contexts
        #    but no response yet; closed groups leave the pending list.
        if "finalResponse" in observation:
            answer = observation["finalResponse"].get("text")
            still_pending = []
            for group in pending:
                if (
                    "user_input" in group
                    and "retrieved_contexts" in group
                    and "response" not in group
                ):
                    group["response"] = answer
                    completed.append(group)
                else:
                    still_pending.append(group)
            pending = still_pending

    return completed
dataset/Restaurant_Childrens_Menu.pdf ADDED
Binary file (31 kB). View file
 
dataset/Restaurant_Dinner_Menu.pdf ADDED
Binary file (31.1 kB). View file
 
dataset/Restaurant_week_specials.pdf ADDED
Binary file (75.1 kB). View file
 
knowledge_base.py ADDED
@@ -0,0 +1,632 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import boto3
3
+ import time
4
+ from botocore.exceptions import ClientError
5
+ from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth, RequestError
6
+ import pprint
7
+ from retrying import retry
8
+
9
+ valid_embedding_models = ["cohere.embed-multilingual-v3", "cohere.embed-english-v3", "amazon.titan-embed-text-v1"]
10
+ pp = pprint.PrettyPrinter(indent=2)
11
+
12
+
13
def interactive_sleep(seconds: int):
    """
    Sleep for *seconds* seconds while printing a growing line of dots on a
    single line as a lightweight progress indicator (used while polling AWS
    resources for readiness).

    Args:
        seconds (int): number of seconds to sleep for
    """
    progress = ''
    for _ in range(seconds):
        progress += '.'
        print(progress, end='\r')  # carriage return keeps it on one line
        time.sleep(1)
24
+
25
+
26
+ class BedrockKnowledgeBase:
27
+ """
28
+ Support class that allows for:
29
+ - creation (or retrieval) of a Knowledge Base for Amazon Bedrock with all its pre-requisites
30
+ (including OSS, IAM roles and Permissions and S3 bucket)
31
+ - Ingestion of data into the Knowledge Base
32
+ - Deletion of all resources created
33
+ """
34
    def __init__(
            self,
            kb_name,
            kb_description=None,
            data_bucket_name=None,
            embedding_model="amazon.titan-embed-text-v1"
    ):
        """
        Class initializer. Creates (or retrieves) every pre-requisite for a
        Bedrock Knowledge Base end to end: S3 bucket, IAM execution role,
        OpenSearch Serverless (OSS) policies, collection and vector index,
        and finally the Knowledge Base itself with its data source.

        Args:
            kb_name (str): the knowledge base name
            kb_description (str): knowledge base description
            data_bucket_name (str): name of s3 bucket to connect with knowledge base
            embedding_model (str): embedding model to use

        Raises:
            ValueError: if embedding_model is not in valid_embedding_models
        """
        boto3_session = boto3.session.Session()
        self.region_name = boto3_session.region_name
        self.iam_client = boto3_session.client('iam')
        self.account_number = boto3.client('sts').get_caller_identity().get('Account')
        # First 4 characters of the account id: keeps resource names short
        # but unique per account.
        self.suffix = str(self.account_number)[:4]
        self.identity = boto3.client('sts').get_caller_identity()['Arn']
        self.aoss_client = boto3_session.client('opensearchserverless')
        self.s3_client = boto3.client('s3')
        self.bedrock_agent_client = boto3.client('bedrock-agent')
        credentials = boto3.Session().get_credentials()
        # SigV4 request signer for OpenSearch Serverless ('aoss') calls.
        self.awsauth = AWSV4SignerAuth(credentials, self.region_name, 'aoss')

        self.kb_name = kb_name
        self.kb_description = kb_description
        if data_bucket_name is not None:
            self.bucket_name = data_bucket_name
        else:
            self.bucket_name = f"{self.kb_name}-{self.suffix}"
        if embedding_model not in valid_embedding_models:
            valid_embeddings_str = str(valid_embedding_models)
            raise ValueError(f"Invalid embedding model. Your embedding model should be one of {valid_embeddings_str}")
        self.embedding_model = embedding_model
        # Names for every IAM/OSS artifact, all suffixed for uniqueness.
        self.encryption_policy_name = f"bedrock-sample-rag-sp-{self.suffix}"
        self.network_policy_name = f"bedrock-sample-rag-np-{self.suffix}"
        self.access_policy_name = f'bedrock-sample-rag-ap-{self.suffix}'
        self.kb_execution_role_name = f'AmazonBedrockExecutionRoleForKnowledgeBase_{self.suffix}'
        self.fm_policy_name = f'AmazonBedrockFoundationModelPolicyForKnowledgeBase_{self.suffix}'
        self.s3_policy_name = f'AmazonBedrockS3PolicyForKnowledgeBase_{self.suffix}'
        self.oss_policy_name = f'AmazonBedrockOSSPolicyForKnowledgeBase_{self.suffix}'

        self.vector_store_name = f'bedrock-sample-rag-{self.suffix}'
        self.index_name = f"bedrock-sample-rag-index-{self.suffix}"
        # Provisioning pipeline: each step depends on the previous one.
        print("========================================================================================")
        print(f"Step 1 - Creating or retrieving {self.bucket_name} S3 bucket for Knowledge Base documents")
        self.create_s3_bucket()
        print("========================================================================================")
        print(f"Step 2 - Creating Knowledge Base Execution Role ({self.kb_execution_role_name}) and Policies")
        self.bedrock_kb_execution_role = self.create_bedrock_kb_execution_role()
        print("========================================================================================")
        print(f"Step 3 - Creating OSS encryption, network and data access policies")
        self.encryption_policy, self.network_policy, self.access_policy = self.create_policies_in_oss()
        print("========================================================================================")
        print(f"Step 4 - Creating OSS Collection (this step takes a couple of minutes to complete)")
        self.host, self.collection, self.collection_id, self.collection_arn = self.create_oss()
        # Build the OpenSearch client
        self.oss_client = OpenSearch(
            hosts=[{'host': self.host, 'port': 443}],
            http_auth=self.awsauth,
            use_ssl=True,
            verify_certs=True,
            connection_class=RequestsHttpConnection,
            timeout=300
        )
        print("========================================================================================")
        print(f"Step 5 - Creating OSS Vector Index")
        self.create_vector_index()
        print("========================================================================================")
        print(f"Step 6 - Creating Knowledge Base")
        self.knowledge_base, self.data_source = self.create_knowledge_base()
        print("========================================================================================")
109
+
110
    def create_s3_bucket(self):
        """
        Check if bucket exists, and if not create S3 bucket for knowledge base data source
        """
        try:
            self.s3_client.head_bucket(Bucket=self.bucket_name)
            print(f'Bucket {self.bucket_name} already exists - retrieving it!')
        except ClientError as e:
            # head_bucket failed (bucket missing or not accessible) - create it.
            print(f'Creating bucket {self.bucket_name}')
            if self.region_name == "us-east-1":
                # us-east-1 rejects an explicit LocationConstraint.
                self.s3_client.create_bucket(
                    Bucket=self.bucket_name
                )
            else:
                self.s3_client.create_bucket(
                    Bucket=self.bucket_name,
                    CreateBucketConfiguration={'LocationConstraint': self.region_name}
                )
128
+
129
    def create_bedrock_kb_execution_role(self):
        """
        Create Knowledge Base Execution IAM Role and its required policies.
        If role and/or policies already exist, retrieve them

        The role is assumable only by bedrock.amazonaws.com and receives two
        customer-managed policies: InvokeModel on the embedding model, and
        read access to the data bucket restricted to this account.

        Returns:
            IAM role (create_role/get_role response dict)
        """
        # Allow invoking only the configured embedding model.
        foundation_model_policy_document = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": [
                        "bedrock:InvokeModel",
                    ],
                    "Resource": [
                        f"arn:aws:bedrock:{self.region_name}::foundation-model/{self.embedding_model}"
                    ]
                }
            ]
        }

        # Read-only access to the data bucket; the Condition pins access to
        # buckets owned by this account (defense in depth).
        s3_policy_document = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": [
                        "s3:GetObject",
                        "s3:ListBucket"
                    ],
                    "Resource": [
                        f"arn:aws:s3:::{self.bucket_name}",
                        f"arn:aws:s3:::{self.bucket_name}/*"
                    ],
                    "Condition": {
                        "StringEquals": {
                            "aws:ResourceAccount": f"{self.account_number}"
                        }
                    }
                }
            ]
        }

        # Trust policy: only the Bedrock service may assume this role.
        assume_role_policy_document = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Principal": {
                        "Service": "bedrock.amazonaws.com"
                    },
                    "Action": "sts:AssumeRole"
                }
            ]
        }
        try:
            # create policies based on the policy documents
            fm_policy = self.iam_client.create_policy(
                PolicyName=self.fm_policy_name,
                PolicyDocument=json.dumps(foundation_model_policy_document),
                Description='Policy for accessing foundation model',
            )
        except self.iam_client.exceptions.EntityAlreadyExistsException:
            # Policy exists from a previous run - retrieve it by ARN.
            fm_policy = self.iam_client.get_policy(
                PolicyArn=f"arn:aws:iam::{self.account_number}:policy/{self.fm_policy_name}"
            )

        try:
            s3_policy = self.iam_client.create_policy(
                PolicyName=self.s3_policy_name,
                PolicyDocument=json.dumps(s3_policy_document),
                Description='Policy for reading documents from s3')
        except self.iam_client.exceptions.EntityAlreadyExistsException:
            s3_policy = self.iam_client.get_policy(
                PolicyArn=f"arn:aws:iam::{self.account_number}:policy/{self.s3_policy_name}"
            )
        # create bedrock execution role
        try:
            bedrock_kb_execution_role = self.iam_client.create_role(
                RoleName=self.kb_execution_role_name,
                AssumeRolePolicyDocument=json.dumps(assume_role_policy_document),
                Description='Amazon Bedrock Knowledge Base Execution Role for accessing OSS and S3',
                MaxSessionDuration=3600
            )
        except self.iam_client.exceptions.EntityAlreadyExistsException:
            bedrock_kb_execution_role = self.iam_client.get_role(
                RoleName=self.kb_execution_role_name
            )
        # fetch arn of the policies and role created above
        s3_policy_arn = s3_policy["Policy"]["Arn"]
        fm_policy_arn = fm_policy["Policy"]["Arn"]

        # attach policies to Amazon Bedrock execution role
        self.iam_client.attach_role_policy(
            RoleName=bedrock_kb_execution_role["Role"]["RoleName"],
            PolicyArn=fm_policy_arn
        )
        self.iam_client.attach_role_policy(
            RoleName=bedrock_kb_execution_role["Role"]["RoleName"],
            PolicyArn=s3_policy_arn
        )
        return bedrock_kb_execution_role
232
+
233
    def create_oss_policy_attach_bedrock_execution_role(self, collection_id):
        """
        Create OpenSearch Serverless policy and attach it to the Knowledge Base Execution role.
        If policy already exists, attaches it

        Args:
            collection_id (str): id of the OSS collection the policy targets

        Returns:
            bool: True when the policy was newly created, False when it
            already existed (the caller uses this to decide whether to wait
            for data-access rules to propagate).
        """
        # define oss policy document
        oss_policy_document = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": [
                        "aoss:APIAccessAll"
                    ],
                    "Resource": [
                        f"arn:aws:aoss:{self.region_name}:{self.account_number}:collection/{collection_id}"
                    ]
                }
            ]
        }

        oss_policy_arn = f"arn:aws:iam::{self.account_number}:policy/{self.oss_policy_name}"
        created = False
        try:
            self.iam_client.create_policy(
                PolicyName=self.oss_policy_name,
                PolicyDocument=json.dumps(oss_policy_document),
                Description='Policy for accessing opensearch serverless',
            )
            created = True
        except self.iam_client.exceptions.EntityAlreadyExistsException:
            print(f"Policy {oss_policy_arn} already exists, skipping creation")
        print("Opensearch serverless arn: ", oss_policy_arn)

        # Attach regardless of whether the policy was just created or reused.
        self.iam_client.attach_role_policy(
            RoleName=self.bedrock_kb_execution_role["Role"]["RoleName"],
            PolicyArn=oss_policy_arn
        )
        return created
272
+
273
    def create_policies_in_oss(self):
        """
        Create OpenSearch Serverless encryption, network and data access policies.
        If policies already exist, retrieve them

        Returns:
            (encryption_policy, network_policy, access_policy): the three
            create/get security-policy responses.
        """
        # Encryption at rest with an AWS-owned key for the collection.
        try:
            encryption_policy = self.aoss_client.create_security_policy(
                name=self.encryption_policy_name,
                policy=json.dumps(
                    {
                        'Rules': [{'Resource': ['collection/' + self.vector_store_name],
                                   'ResourceType': 'collection'}],
                        'AWSOwnedKey': True
                    }),
                type='encryption'
            )
        except self.aoss_client.exceptions.ConflictException:
            # Policy already exists - retrieve it instead.
            encryption_policy = self.aoss_client.get_security_policy(
                name=self.encryption_policy_name,
                type='encryption'
            )

        # Network policy: collection is reachable from the public internet.
        try:
            network_policy = self.aoss_client.create_security_policy(
                name=self.network_policy_name,
                policy=json.dumps(
                    [
                        {'Rules': [{'Resource': ['collection/' + self.vector_store_name],
                                    'ResourceType': 'collection'}],
                         'AllowFromPublic': True}
                    ]),
                type='network'
            )
        except self.aoss_client.exceptions.ConflictException:
            network_policy = self.aoss_client.get_security_policy(
                name=self.network_policy_name,
                type='network'
            )

        # Data access policy: grants collection and index CRUD to the
        # current caller identity and the KB execution role.
        try:
            access_policy = self.aoss_client.create_access_policy(
                name=self.access_policy_name,
                policy=json.dumps(
                    [
                        {
                            'Rules': [
                                {
                                    'Resource': ['collection/' + self.vector_store_name],
                                    'Permission': [
                                        'aoss:CreateCollectionItems',
                                        'aoss:DeleteCollectionItems',
                                        'aoss:UpdateCollectionItems',
                                        'aoss:DescribeCollectionItems'],
                                    'ResourceType': 'collection'
                                },
                                {
                                    'Resource': ['index/' + self.vector_store_name + '/*'],
                                    'Permission': [
                                        'aoss:CreateIndex',
                                        'aoss:DeleteIndex',
                                        'aoss:UpdateIndex',
                                        'aoss:DescribeIndex',
                                        'aoss:ReadDocument',
                                        'aoss:WriteDocument'],
                                    'ResourceType': 'index'
                                }],
                            'Principal': [self.identity, self.bedrock_kb_execution_role['Role']['Arn']],
                            'Description': 'Easy data policy'}
                    ]),
                type='data'
            )
        except self.aoss_client.exceptions.ConflictException:
            access_policy = self.aoss_client.get_access_policy(
                name=self.access_policy_name,
                type='data'
            )

        return encryption_policy, network_policy, access_policy
351
+
352
    def create_oss(self):
        """
        Create OpenSearch Serverless Collection. If already existent, retrieve

        Polls until the collection leaves the CREATING state, then creates
        and attaches the IAM policy that lets the KB execution role call the
        collection.

        Returns:
            (host, collection, collection_id, collection_arn)

        NOTE(review): if create_oss_policy_attach_bedrock_execution_role
        raises, the except branch below falls through and this method
        implicitly returns None, which would break the 4-way unpacking in
        __init__ - confirm whether that path can actually occur.
        """
        try:
            collection = self.aoss_client.create_collection(name=self.vector_store_name, type='VECTORSEARCH')
            collection_id = collection['createCollectionDetail']['id']
            collection_arn = collection['createCollectionDetail']['arn']
        except self.aoss_client.exceptions.ConflictException:
            # Collection already exists - look it up instead (different
            # response shape than create_collection).
            collection = self.aoss_client.batch_get_collection(names=[self.vector_store_name])['collectionDetails'][0]
            pp.pprint(collection)
            collection_id = collection['id']
            collection_arn = collection['arn']
        pp.pprint(collection)

        # Get the OpenSearch serverless collection URL
        host = collection_id + '.' + self.region_name + '.aoss.amazonaws.com'
        print(host)
        # wait for collection creation
        # This can take couple of minutes to finish
        response = self.aoss_client.batch_get_collection(names=[self.vector_store_name])
        # Periodically check collection status
        while (response['collectionDetails'][0]['status']) == 'CREATING':
            print('Creating collection...')
            interactive_sleep(30)
            response = self.aoss_client.batch_get_collection(names=[self.vector_store_name])
        print('\nCollection successfully created:')
        pp.pprint(response["collectionDetails"])
        # create opensearch serverless access policy and attach it to Bedrock execution role
        try:
            created = self.create_oss_policy_attach_bedrock_execution_role(collection_id)
            if created:
                # It can take up to a minute for data access rules to be enforced
                print("Sleeping for a minute to ensure data access rules have been enforced")
                interactive_sleep(60)
            return host, collection, collection_id, collection_arn
        except Exception as e:
            # NOTE(review): message assumes the only failure mode is a
            # pre-existing policy; other errors are swallowed here too.
            print("Policy already exists")
            pp.pprint(e)
391
+
392
+ def create_vector_index(self):
393
+ """
394
+ Create OpenSearch Serverless vector index. If existent, ignore
395
+ """
396
+ body_json = {
397
+ "settings": {
398
+ "index.knn": "true",
399
+ "number_of_shards": 1,
400
+ "knn.algo_param.ef_search": 512,
401
+ "number_of_replicas": 0,
402
+ },
403
+ "mappings": {
404
+ "properties": {
405
+ "vector": {
406
+ "type": "knn_vector",
407
+ "dimension": 1536,
408
+ "method": {
409
+ "name": "hnsw",
410
+ "engine": "faiss",
411
+ "space_type": "l2"
412
+ },
413
+ },
414
+ "text": {
415
+ "type": "text"
416
+ },
417
+ "text-metadata": {
418
+ "type": "text"}
419
+ }
420
+ }
421
+ }
422
+
423
+ # Create index
424
+ try:
425
+ response = self.oss_client.indices.create(index=self.index_name, body=json.dumps(body_json))
426
+ print('\nCreating index:')
427
+ pp.pprint(response)
428
+
429
+ # index creation can take up to a minute
430
+ interactive_sleep(60)
431
+ except RequestError as e:
432
+ # you can delete the index if its already exists
433
+ # oss_client.indices.delete(index=index_name)
434
+ print(
435
+ f'Error while trying to create the index, with error {e.error}\nyou may unmark the delete above to '
436
+ f'delete, and recreate the index')
437
+
438
    @retry(wait_random_min=1000, wait_random_max=2000, stop_max_attempt_number=7)
    def create_knowledge_base(self):
        """
        Create the Bedrock Knowledge Base and its S3 Data Source; if either
        already exists (ConflictException), retrieve the existing resource.

        Retried up to 7 times with 1-2s random backoff to absorb eventual
        consistency of the freshly created IAM role and OSS access policy.

        Returns:
            tuple: (kb, ds) -- the knowledge base and data source description dicts.
        """
        # Storage backend: the OSS collection/index created earlier, with the
        # field names matching those defined in create_vector_index.
        opensearch_serverless_configuration = {
            "collectionArn": self.collection_arn,
            "vectorIndexName": self.index_name,
            "fieldMapping": {
                "vectorField": "vector",
                "textField": "text",
                "metadataField": "text-metadata"
            }
        }

        # Ingest strategy - How to ingest data from the data source
        chunking_strategy_configuration = {
            "chunkingStrategy": "FIXED_SIZE",
            "fixedSizeChunkingConfiguration": {
                "maxTokens": 512,
                "overlapPercentage": 20
            }
        }

        # The data source to ingest documents from, into the OpenSearch serverless knowledge base index
        s3_configuration = {
            "bucketArn": f"arn:aws:s3:::{self.bucket_name}",
            # "inclusionPrefixes":["*.*"] # you can use this if you want to create a KB using data within s3 prefixes.
        }

        # The embedding model used by Bedrock to embed ingested documents, and realtime prompts
        embedding_model_arn = f"arn:aws:bedrock:{self.region_name}::foundation-model/{self.embedding_model}"
        try:
            create_kb_response = self.bedrock_agent_client.create_knowledge_base(
                name=self.kb_name,
                description=self.kb_description,
                roleArn=self.bedrock_kb_execution_role['Role']['Arn'],
                knowledgeBaseConfiguration={
                    "type": "VECTOR",
                    "vectorKnowledgeBaseConfiguration": {
                        "embeddingModelArn": embedding_model_arn
                    }
                },
                storageConfiguration={
                    "type": "OPENSEARCH_SERVERLESS",
                    "opensearchServerlessConfiguration": opensearch_serverless_configuration
                }
            )
            kb = create_kb_response["knowledgeBase"]
            pp.pprint(kb)
        except self.bedrock_agent_client.exceptions.ConflictException:
            # KB with this name already exists: find its id among the summaries
            # and fetch the full description.
            kbs = self.bedrock_agent_client.list_knowledge_bases(
                maxResults=100
            )
            kb_id = None
            for kb in kbs['knowledgeBaseSummaries']:
                if kb['name'] == self.kb_name:
                    kb_id = kb['knowledgeBaseId']
            # NOTE(review): if the KB is not among the first 100 summaries
            # (no pagination here), kb_id stays None and get_knowledge_base
            # will fail -- confirm whether >100 KBs is possible in this account.
            response = self.bedrock_agent_client.get_knowledge_base(knowledgeBaseId=kb_id)
            kb = response['knowledgeBase']
            pp.pprint(kb)

        # Create a DataSource in KnowledgeBase
        try:
            create_ds_response = self.bedrock_agent_client.create_data_source(
                name=self.kb_name,
                description=self.kb_description,
                knowledgeBaseId=kb['knowledgeBaseId'],
                dataSourceConfiguration={
                    "type": "S3",
                    "s3Configuration": s3_configuration
                },
                vectorIngestionConfiguration={
                    "chunkingConfiguration": chunking_strategy_configuration
                }
            )
            ds = create_ds_response["dataSource"]
            pp.pprint(ds)
        except self.bedrock_agent_client.exceptions.ConflictException:
            # Data source already exists: retrieve the first one attached to this KB.
            ds_id = self.bedrock_agent_client.list_data_sources(
                knowledgeBaseId=kb['knowledgeBaseId'],
                maxResults=100
            )['dataSourceSummaries'][0]['dataSourceId']
            get_ds_response = self.bedrock_agent_client.get_data_source(
                dataSourceId=ds_id,
                knowledgeBaseId=kb['knowledgeBaseId']
            )
            ds = get_ds_response["dataSource"]
            pp.pprint(ds)
        return kb, ds
528
+
529
+ def start_ingestion_job(self):
530
+ """
531
+ Start an ingestion job to synchronize data from an S3 bucket to the Knowledge Base
532
+ """
533
+ # Start an ingestion job
534
+ start_job_response = self.bedrock_agent_client.start_ingestion_job(
535
+ knowledgeBaseId=self.knowledge_base['knowledgeBaseId'],
536
+ dataSourceId=self.data_source["dataSourceId"]
537
+ )
538
+ job = start_job_response["ingestionJob"]
539
+ pp.pprint(job)
540
+ # Get job
541
+ while job['status'] != 'COMPLETE':
542
+ get_job_response = self.bedrock_agent_client.get_ingestion_job(
543
+ knowledgeBaseId=self.knowledge_base['knowledgeBaseId'],
544
+ dataSourceId=self.data_source["dataSourceId"],
545
+ ingestionJobId=job["ingestionJobId"]
546
+ )
547
+ job = get_job_response["ingestionJob"]
548
+ pp.pprint(job)
549
+ interactive_sleep(40)
550
+
551
+ def get_knowledge_base_id(self):
552
+ """
553
+ Get Knowledge Base Id
554
+ """
555
+ pp.pprint(self.knowledge_base["knowledgeBaseId"])
556
+ return self.knowledge_base["knowledgeBaseId"]
557
+
558
+ def get_bucket_name(self):
559
+ """
560
+ Get the name of the bucket connected with the Knowledge Base Data Source
561
+ """
562
+ pp.pprint(f"Bucket connected with KB: {self.bucket_name}")
563
+ return self.bucket_name
564
+
565
    def delete_kb(self, delete_s3_bucket=False, delete_iam_roles_and_policies=True):
        """
        Delete the Knowledge Base resources.

        The deletions run in dependency order: data source before knowledge
        base, vector index before collection, and the collection's access /
        network / encryption policies last. Any failure aborts the remaining
        steps (calls are not wrapped individually).

        Args:
            delete_s3_bucket (bool): boolean to indicate if s3 bucket should also be deleted
            delete_iam_roles_and_policies (bool): boolean to indicate if IAM roles and Policies should also be deleted
        """
        # Data source must go before its knowledge base.
        self.bedrock_agent_client.delete_data_source(
            dataSourceId=self.data_source["dataSourceId"],
            knowledgeBaseId=self.knowledge_base['knowledgeBaseId']
        )
        self.bedrock_agent_client.delete_knowledge_base(
            knowledgeBaseId=self.knowledge_base['knowledgeBaseId']
        )
        # Drop the vector index, then the OSS collection that hosted it.
        self.oss_client.indices.delete(index=self.index_name)
        self.aoss_client.delete_collection(id=self.collection_id)
        # Remove the three OSS policies created for the collection.
        self.aoss_client.delete_access_policy(
            type="data",
            name=self.access_policy_name
        )
        self.aoss_client.delete_security_policy(
            type="network",
            name=self.network_policy_name
        )
        self.aoss_client.delete_security_policy(
            type="encryption",
            name=self.encryption_policy_name
        )
        if delete_s3_bucket:
            self.delete_s3()
        if delete_iam_roles_and_policies:
            self.delete_iam_roles_and_policies()
597
+
598
+ def delete_iam_roles_and_policies(self):
599
+ """
600
+ Delete IAM Roles and policies used by the Knowledge Base
601
+ """
602
+ fm_policy_arn = f"arn:aws:iam::{self.account_number}:policy/{self.fm_policy_name}"
603
+ s3_policy_arn = f"arn:aws:iam::{self.account_number}:policy/{self.s3_policy_name}"
604
+ oss_policy_arn = f"arn:aws:iam::{self.account_number}:policy/{self.oss_policy_name}"
605
+ self.iam_client.detach_role_policy(
606
+ RoleName=self.kb_execution_role_name,
607
+ PolicyArn=s3_policy_arn
608
+ )
609
+ self.iam_client.detach_role_policy(
610
+ RoleName=self.kb_execution_role_name,
611
+ PolicyArn=fm_policy_arn
612
+ )
613
+ self.iam_client.detach_role_policy(
614
+ RoleName=self.kb_execution_role_name,
615
+ PolicyArn=oss_policy_arn
616
+ )
617
+ self.iam_client.delete_role(RoleName=self.kb_execution_role_name)
618
+ self.iam_client.delete_policy(PolicyArn=s3_policy_arn)
619
+ self.iam_client.delete_policy(PolicyArn=fm_policy_arn)
620
+ self.iam_client.delete_policy(PolicyArn=oss_policy_arn)
621
+ return 0
622
+
623
+ def delete_s3(self):
624
+ """
625
+ Delete the objects contained in the Knowledge Base S3 bucket.
626
+ Once the bucket is empty, delete the bucket
627
+ """
628
+ objects = self.s3_client.list_objects(Bucket=self.bucket_name)
629
+ if 'Contents' in objects:
630
+ for obj in objects['Contents']:
631
+ self.s3_client.delete_object(Bucket=self.bucket_name, Key=obj['Key'])
632
+ self.s3_client.delete_bucket(Bucket=self.bucket_name)