hatmanstack committed · Commit 9c1e305 · 1 Parent(s): 4b6b3ad
added bucket for image caching
generate.py CHANGED (+46 -6)
@@ -4,6 +4,7 @@ import boto3
 import json
 import logging
 import io
+from datetime import datetime
 from dotenv import load_dotenv
 from PIL import Image
 from functools import wraps
@@ -45,6 +46,8 @@ config = ImageConfig()
 model_id = 'amazon.nova-canvas-v1:0'
 aws_id = os.getenv('AWS_ID')
 aws_secret = os.getenv('AWS_SECRET')
+nova_image_bucket='nova-image-data'
+bucket_region='us-west-2'
 
 class ImageProcessor:
     def __init__(self, image):
@@ -108,15 +111,45 @@ class ImageProcessor:
 # Function to generate an image using Amazon Nova Canvas model
 class BedrockClient:
 
-    def __init__(self, aws_id, aws_secret, model_id,
+    def __init__(self, aws_id, aws_secret, model_id, timeout=300):
         self.model_id = model_id
-        self.
+        self.bedrock_client = boto3.client(
             service_name='bedrock-runtime',
             aws_access_key_id=aws_id,
             aws_secret_access_key=aws_secret,
-            region_name=
+            region_name='us-east-1',
             config=Config(read_timeout=timeout)
         )
+        self.s3_client = boto3.client(
+            service_name='s3',
+            aws_access_key_id=aws_id,
+            aws_secret_access_key=aws_secret,
+            region_name=bucket_region
+        )
+
+    def _store_response(self, response_body, image_data=None):
+        """Store response and image in S3."""
+        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+
+        # Store response body
+        response_key = f'responses/{timestamp}_response.json'
+        self.s3_client.put_object(
+            Bucket=nova_image_bucket,
+            Key=response_key,
+            Body=json.dumps(response_body),
+            ContentType='application/json'
+        )
+
+        # Store image if present
+        if image_data:
+            image_key = f'images/{timestamp}_image.png'
+            self.s3_client.put_object(
+                Bucket=nova_image_bucket,
+                Key=image_key,
+                Body=image_data,
+                ContentType='image/png'
+            )
+
 
     def _handle_error(self, err):
         """Handle client errors"""
@@ -125,20 +158,27 @@ class BedrockClient:
     def generate_image(self, body):
         """Generate image using Bedrock service."""
         try:
-            response = self.
+            response = self.bedrock_client.invoke_model(
                 body=body,
                 modelId=self.model_id,
                 accept="application/json",
                 contentType="application/json"
             )
-
+            image_data = self._process_response(response)
+
+            self._store_response(
+                body,
+                image_data
+            )
+
+            return image_data
         except ClientError as err:
             self._handle_error(err)
 
     @handle_bedrock_errors
     def generate_prompt(self, body):
         try:
-            response = self.
+            response = self.bedrock_client.converse(
                 modelId=self.model_id,
                 messages=body
             )