update
app.py      +226 -2
backend.py  +0 -219
app.py
CHANGED
@@ -1,7 +1,231 @@
 import gradio as gr
 from gradio_multimodalchatbot import MultimodalChatbot
 from gradio.data_classes import FileData
-
+import os
+import pandas as pd
+import requests
+from PIL import Image, UnidentifiedImageError
+from io import BytesIO
+import matplotlib.pyplot as plt
+import urllib3
+from transformers import pipeline
+from transformers import BitsAndBytesConfig
+import torch
+import textwrap
+import pandas as pd
+import numpy as np
+from haversine import haversine  # Install haversine library: pip install haversine
+from transformers import AutoProcessor, LlavaForConditionalGeneration
+from transformers import BitsAndBytesConfig
+import torch
+from huggingface_hub import InferenceClient
+
+IS_SPACES_ZERO = os.environ.get("SPACES_ZERO_GPU", "0") == "1"
+IS_SPACE = os.environ.get("SPACE_ID", None) is not None
+
+# Constants
+DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
+LOW_MEMORY = os.getenv("LOW_MEMORY", "0") == "1"
+MODEL_ID = "llava-hf/llava-1.5-7b-hf"
+TEXT_MODEL_ID = "mistralai/Mistral-7B-Instruct-v0.2"
+
+# Print device and memory info
+print(f"Using device: {DEVICE}")
+print(f"Low memory: {LOW_MEMORY}")
+
+# Quantization configuration for efficient model loading
+quantization_config = BitsAndBytesConfig(
+    load_in_4bit=True,
+    bnb_4bit_compute_dtype=torch.float16
+)
+
+# Load models only once
+processor = AutoProcessor.from_pretrained(MODEL_ID)
+model = LlavaForConditionalGeneration.from_pretrained(MODEL_ID, quantization_config=quantization_config, device_map="auto").to(DEVICE)
+pipe_image_to_text = pipeline("image-to-text", model=model, model_kwargs={"quantization_config": quantization_config})
+# Initialize the text generation pipeline
+pipe_text = pipeline("text-generation", model=TEXT_MODEL_ID, model_kwargs={"quantization_config": quantization_config})
+
+# Ensure data files are available
+current_directory = os.getcwd()
+geocoded_hotels_path = os.path.join(current_directory, 'geocoded_hotels.csv')
+csv_file_path = os.path.join(current_directory, 'hotel_multimodal.csv')
+
+# Load geocoded hotels data
+if not os.path.isfile(geocoded_hotels_path):
+    url = 'https://github.com/ruslanmv/watsonx-with-multimodal-llava/raw/master/geocoded_hotels.csv'
+    response = requests.get(url)
+    if response.status_code == 200:
+        with open(geocoded_hotels_path, 'wb') as f:
+            f.write(response.content)
+        print(f"File {geocoded_hotels_path} downloaded successfully!")
+    else:
+        print(f"Error downloading file. Status code: {response.status_code}")
+else:
+    print(f"File {geocoded_hotels_path} already exists.")
+geocoded_hotels = pd.read_csv(geocoded_hotels_path)
+
+# Load hotel dataset
+if not os.path.exists(csv_file_path):
+    dataset = load_dataset("ruslanmv/hotel-multimodal")
+    df_hotels = dataset['train'].to_pandas()
+    df_hotels.to_csv(csv_file_path, index=False)
+    print("Dataset downloaded and saved as CSV.")
+else:
+    df_hotels = pd.read_csv(csv_file_path)
+
+def get_current_location():
+    try:
+        response = requests.get('https://ipinfo.io/json')
+        data = response.json()
+        location = data.get('loc', '')
+        if location:
+            return map(float, location.split(','))
+        else:
+            return None, None
+    except Exception as e:
+        print(f"An error occurred: {e}")
+        return None, None
+
+def get_coordinates(location_name):
+    geolocator = Nominatim(user_agent="coordinate_finder")
+    location = geolocator.geocode(location_name)
+    if location:
+        return location.latitude, location.longitude
+    else:
+        return None
+
+def find_nearby(place=None):
+    if place:
+        coordinates = get_coordinates(place)
+        if coordinates:
+            latitude, longitude = coordinates
+            print(f"The coordinates of {place} are: Latitude: {latitude}, Longitude: {longitude}")
+        else:
+            print(f"Location not found: {place}")
+            return None
+    else:
+        latitude, longitude = get_current_location()
+        if not latitude or not longitude:
+            print("Could not retrieve the current location.")
+            return None
+
+    geocoded_hotels['distance_km'] = geocoded_hotels.apply(
+        lambda row: haversine((latitude, longitude), (row['latitude'], row['longitude'])),
+        axis=1
+    )
+
+    closest_hotels = geocoded_hotels.sort_values(by='distance_km').head(5)
+    print("The 5 closest locations are:\n")
+    print(closest_hotels)
+    return closest_hotels
+
+@spaces.GPU
+# Define the respond function
+def search_hotel(place=None):
+    df_found = find_nearby(place)
+    if df_found is None:
+        return pd.DataFrame()
+    hotel_ids = df_found["hotel_id"].values.tolist()
+    filtered_df = df_hotels[df_hotels['hotel_id'].isin(hotel_ids)]
+    filtered_df['hotel_id'] = pd.Categorical(filtered_df['hotel_id'], categories=hotel_ids, ordered=True)
+    filtered_df = filtered_df.sort_values('hotel_id').reset_index(drop=True)
+    grouped_df = filtered_df.groupby('hotel_id', observed=True).head(2)
+    description_data = []
+
+    for index, row in grouped_df.iterrows():
+        hotel_id = row['hotel_id']
+        hotel_name = row['hotel_name']
+        image_url = row['image_url']
+
+        try:
+            response = requests.get(image_url, verify=False)
+            response.raise_for_status()
+            img = Image.open(BytesIO(response.content))
+
+            prompt = "USER: <image>\nAnalyze this image. Give me feedback on whether this hotel is worth visiting based on the picture. Provide a summary review.\nASSISTANT:"
+            outputs = pipe_image_to_text(img, prompt=prompt, generate_kwargs={"max_new_tokens": 200})
+            description = outputs[0]["generated_text"].split("\nASSISTANT:")[-1].strip()
+
+            description_data.append({'hotel_name': hotel_name, 'hotel_id': hotel_id, 'image': img, 'description': description})
+        except (requests.RequestException, UnidentifiedImageError):
+            print(f"Skipping image at URL: {image_url}")
+
+    return pd.DataFrame(description_data)
+
+def show_hotels(place=None):
+    description_df = search_hotel(place)
+    if description_df.empty:
+        print("No hotels found.")
+        return
+    num_images = len(description_df)
+    num_rows = (num_images + 1) // 2
+
+    fig, axs = plt.subplots(num_rows * 2, 2, figsize=(20, 10 * num_rows))
+
+    current_index = 0
+    for _, row in description_df.iterrows():
+        img = row['image']
+        description = row['description']
+
+        if img is None:
+            continue
+
+        row_idx = (current_index // 2) * 2
+        col_idx = current_index % 2
+
+        axs[row_idx, col_idx].imshow(img)
+        axs[row_idx, col_idx].axis('off')
+        axs[row_idx, col_idx].set_title(f"{row['hotel_name']}\nHotel ID: {row['hotel_id']} Image {current_index + 1}", fontsize=16)
+
+        wrapped_description = "\n".join(textwrap.wrap(description, width=50))
+        axs[row_idx + 1, col_idx].text(0.5, 0.5, wrapped_description, ha='center', va='center', wrap=True, fontsize=14)
+        axs[row_idx + 1, col_idx].axis('off')
+
+        current_index += 1
+
+    plt.tight_layout()
+    plt.show()
+
+def grouped_description(description_df):
+    grouped_descriptions = description_df.groupby('hotel_id')['description'].apply(lambda x: ' '.join(x.astype(str))).reset_index()
+    result_df = pd.merge(grouped_descriptions, description_df[['hotel_id', 'hotel_name']], on='hotel_id', how='left')
+    result_df = result_df.drop_duplicates(subset='hotel_id', keep='first')
+    result_df = result_df[['hotel_name', 'hotel_id', 'description']]
+    return result_df
+
+def create_prompt_result(result_df):
+    prompt = ""
+    for _, row in result_df.iterrows():
+        hotel_name = row['hotel_name']
+        hotel_id = row['hotel_id']
+        description = row['description']
+        prompt += f"Hotel Name: {hotel_name}\nHotel ID: {hotel_id}\nDescription: {description}\n\n"
+    return prompt
+
+def build_prompt(context_result):
+    hotel_recommendation_template = """
+<s>[INST] <<SYS>>
+You are a helpful and informative chatbot assistant.
+<</SYS>>
+Based on the following hotel descriptions, recommend the best hotel:
+{context_result}
+[/INST]
+"""
+    return hotel_recommendation_template.format(context_result=context_result)
+@spaces.GPU
+# Define the respond function
+def generate_text_response(prompt):
+    outputs = pipe_text(prompt, max_new_tokens=500)
+    response = outputs[0]['generated_text'].split("[/INST]")[-1].strip()
+    return response
+
+
+
+
+
+
+
 def multimodal_results(description_df):
     conversation = []
     for _, row in description_df.iterrows():
@@ -29,7 +253,7 @@ def llm_results(description_df):
     conversation = [[{"text": "Based on your search...", "files": []}, {"text": f"**My recommendation:** {result}", "files": []}]]
     return conversation
 
-
+@spaces.GPU
 def chatbot_response(user_input, conversation):
    bot_initial_message = {
        "text": f"Looking for hotels in {user_input}...",
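
The added code references load_dataset, Nominatim, and spaces.GPU, but no matching imports appear among the lines added in this hunk; presumably they are brought in elsewhere in app.py. As a hint only, a minimal sketch of the imports those names conventionally come from (assuming the standard datasets, geopy, and spaces packages; this is not part of the commit):

    import spaces                           # Hugging Face Spaces ZeroGPU package providing @spaces.GPU
    from datasets import load_dataset       # used to fetch "ruslanmv/hotel-multimodal"
    from geopy.geocoders import Nominatim   # geocoder used inside get_coordinates()
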
backend.py
DELETED
@@ -1,219 +0,0 @@
-import os
-import pandas as pd
-import requests
-from PIL import Image, UnidentifiedImageError
-from io import BytesIO
-import matplotlib.pyplot as plt
-import urllib3
-from transformers import pipeline
-from transformers import BitsAndBytesConfig
-import torch
-import textwrap
-import pandas as pd
-import numpy as np
-from haversine import haversine  # Install haversine library: pip install haversine
-from transformers import AutoProcessor, LlavaForConditionalGeneration
-from transformers import BitsAndBytesConfig
-import torch
-from huggingface_hub import InferenceClient
-
-IS_SPACES_ZERO = os.environ.get("SPACES_ZERO_GPU", "0") == "1"
-IS_SPACE = os.environ.get("SPACE_ID", None) is not None
-
-# Constants
-DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
-LOW_MEMORY = os.getenv("LOW_MEMORY", "0") == "1"
-MODEL_ID = "llava-hf/llava-1.5-7b-hf"
-TEXT_MODEL_ID = "mistralai/Mistral-7B-Instruct-v0.2"
-
-# Print device and memory info
-print(f"Using device: {DEVICE}")
-print(f"Low memory: {LOW_MEMORY}")
-
-# Quantization configuration for efficient model loading
-quantization_config = BitsAndBytesConfig(
-    load_in_4bit=True,
-    bnb_4bit_compute_dtype=torch.float16
-)
-
-# Load models only once
-processor = AutoProcessor.from_pretrained(MODEL_ID)
-model = LlavaForConditionalGeneration.from_pretrained(MODEL_ID, quantization_config=quantization_config, device_map="auto").to(DEVICE)
-pipe_image_to_text = pipeline("image-to-text", model=model, model_kwargs={"quantization_config": quantization_config})
-
-# Initialize the text generation pipeline
-pipe_text = pipeline("text-generation", model=TEXT_MODEL_ID, model_kwargs={"quantization_config": quantization_config})
-
-# Ensure data files are available
-current_directory = os.getcwd()
-geocoded_hotels_path = os.path.join(current_directory, 'geocoded_hotels.csv')
-csv_file_path = os.path.join(current_directory, 'hotel_multimodal.csv')
-
-# Load geocoded hotels data
-if not os.path.isfile(geocoded_hotels_path):
-    url = 'https://github.com/ruslanmv/watsonx-with-multimodal-llava/raw/master/geocoded_hotels.csv'
-    response = requests.get(url)
-    if response.status_code == 200:
-        with open(geocoded_hotels_path, 'wb') as f:
-            f.write(response.content)
-        print(f"File {geocoded_hotels_path} downloaded successfully!")
-    else:
-        print(f"Error downloading file. Status code: {response.status_code}")
-else:
-    print(f"File {geocoded_hotels_path} already exists.")
-geocoded_hotels = pd.read_csv(geocoded_hotels_path)
-
-# Load hotel dataset
-if not os.path.exists(csv_file_path):
-    dataset = load_dataset("ruslanmv/hotel-multimodal")
-    df_hotels = dataset['train'].to_pandas()
-    df_hotels.to_csv(csv_file_path, index=False)
-    print("Dataset downloaded and saved as CSV.")
-else:
-    df_hotels = pd.read_csv(csv_file_path)
-
-def get_current_location():
-    try:
-        response = requests.get('https://ipinfo.io/json')
-        data = response.json()
-        location = data.get('loc', '')
-        if location:
-            return map(float, location.split(','))
-        else:
-            return None, None
-    except Exception as e:
-        print(f"An error occurred: {e}")
-        return None, None
-
-def get_coordinates(location_name):
-    geolocator = Nominatim(user_agent="coordinate_finder")
-    location = geolocator.geocode(location_name)
-    if location:
-        return location.latitude, location.longitude
-    else:
-        return None
-
-def find_nearby(place=None):
-    if place:
-        coordinates = get_coordinates(place)
-        if coordinates:
-            latitude, longitude = coordinates
-            print(f"The coordinates of {place} are: Latitude: {latitude}, Longitude: {longitude}")
-        else:
-            print(f"Location not found: {place}")
-            return None
-    else:
-        latitude, longitude = get_current_location()
-        if not latitude or not longitude:
-            print("Could not retrieve the current location.")
-            return None
-
-    geocoded_hotels['distance_km'] = geocoded_hotels.apply(
-        lambda row: haversine((latitude, longitude), (row['latitude'], row['longitude'])),
-        axis=1
-    )
-
-    closest_hotels = geocoded_hotels.sort_values(by='distance_km').head(5)
-    print("The 5 closest locations are:\n")
-    print(closest_hotels)
-    return closest_hotels
-
-@spaces.GPU
-# Define the respond function
-def search_hotel(place=None):
-    df_found = find_nearby(place)
-    if df_found is None:
-        return pd.DataFrame()
-    hotel_ids = df_found["hotel_id"].values.tolist()
-    filtered_df = df_hotels[df_hotels['hotel_id'].isin(hotel_ids)]
-    filtered_df['hotel_id'] = pd.Categorical(filtered_df['hotel_id'], categories=hotel_ids, ordered=True)
-    filtered_df = filtered_df.sort_values('hotel_id').reset_index(drop=True)
-    grouped_df = filtered_df.groupby('hotel_id', observed=True).head(2)
-    description_data = []
-
-    for index, row in grouped_df.iterrows():
-        hotel_id = row['hotel_id']
-        hotel_name = row['hotel_name']
-        image_url = row['image_url']
-
-        try:
-            response = requests.get(image_url, verify=False)
-            response.raise_for_status()
-            img = Image.open(BytesIO(response.content))
-
-            prompt = "USER: <image>\nAnalyze this image. Give me feedback on whether this hotel is worth visiting based on the picture. Provide a summary review.\nASSISTANT:"
-            outputs = pipe_image_to_text(img, prompt=prompt, generate_kwargs={"max_new_tokens": 200})
-            description = outputs[0]["generated_text"].split("\nASSISTANT:")[-1].strip()
-
-            description_data.append({'hotel_name': hotel_name, 'hotel_id': hotel_id, 'image': img, 'description': description})
-        except (requests.RequestException, UnidentifiedImageError):
-            print(f"Skipping image at URL: {image_url}")
-
-    return pd.DataFrame(description_data)
-
-def show_hotels(place=None):
-    description_df = search_hotel(place)
-    if description_df.empty:
-        print("No hotels found.")
-        return
-    num_images = len(description_df)
-    num_rows = (num_images + 1) // 2
-
-    fig, axs = plt.subplots(num_rows * 2, 2, figsize=(20, 10 * num_rows))
-
-    current_index = 0
-    for _, row in description_df.iterrows():
-        img = row['image']
-        description = row['description']
-
-        if img is None:
-            continue
-
-        row_idx = (current_index // 2) * 2
-        col_idx = current_index % 2
-
-        axs[row_idx, col_idx].imshow(img)
-        axs[row_idx, col_idx].axis('off')
-        axs[row_idx, col_idx].set_title(f"{row['hotel_name']}\nHotel ID: {row['hotel_id']} Image {current_index + 1}", fontsize=16)
-
-        wrapped_description = "\n".join(textwrap.wrap(description, width=50))
-        axs[row_idx + 1, col_idx].text(0.5, 0.5, wrapped_description, ha='center', va='center', wrap=True, fontsize=14)
-        axs[row_idx + 1, col_idx].axis('off')
-
-        current_index += 1
-
-    plt.tight_layout()
-    plt.show()
-
-def grouped_description(description_df):
-    grouped_descriptions = description_df.groupby('hotel_id')['description'].apply(lambda x: ' '.join(x.astype(str))).reset_index()
-    result_df = pd.merge(grouped_descriptions, description_df[['hotel_id', 'hotel_name']], on='hotel_id', how='left')
-    result_df = result_df.drop_duplicates(subset='hotel_id', keep='first')
-    result_df = result_df[['hotel_name', 'hotel_id', 'description']]
-    return result_df
-
-def create_prompt_result(result_df):
-    prompt = ""
-    for _, row in result_df.iterrows():
-        hotel_name = row['hotel_name']
-        hotel_id = row['hotel_id']
-        description = row['description']
-        prompt += f"Hotel Name: {hotel_name}\nHotel ID: {hotel_id}\nDescription: {description}\n\n"
-    return prompt
-
-def build_prompt(context_result):
-    hotel_recommendation_template = """
-<s>[INST] <<SYS>>
-You are a helpful and informative chatbot assistant.
-<</SYS>>
-Based on the following hotel descriptions, recommend the best hotel:
-{context_result}
-[/INST]
-"""
-    return hotel_recommendation_template.format(context_result=context_result)
-@spaces.GPU
-# Define the respond function
-def generate_text_response(prompt):
-    outputs = pipe_text(prompt, max_new_tokens=500)
-    response = outputs[0]['generated_text'].split("[/INST]")[-1].strip()
-    return response
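
The functions deleted here are the same ones now consolidated into app.py above. As a rough illustration of how they chain together, a hypothetical end-to-end call might look like this (the place name is made up, and running it assumes a GPU plus the two CSV files described above):

    df_desc = search_hotel("Genoa")                    # LLaVA captions for images of the 5 nearest hotels
    result_df = grouped_description(df_desc)           # merge captions into one description per hotel
    context = create_prompt_result(result_df)          # flatten descriptions into a prompt context string
    answer = generate_text_response(build_prompt(context))  # Mistral picks a recommendation
    print(answer)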