File size: 5,898 Bytes
07037b8 51782e8 086f34e 90c6fe9 07037b8 feba6f5 90c6fe9 feba6f5 07037b8 4d09653 619c894 4d09653 07037b8 4d09653 07037b8 5be2eda 07037b8 5be2eda 07037b8 ed4e4a3 015e894 07037b8 338a544 c7a20f3 7eb9f14 338a544 7eb9f14 4a18196 1a3195c 20d2574 e99d706 1a3195c 5b88659 e99d706 5b88659 e99d706 07037b8 7eb9f14 20d2574 4a18196 20d2574 bf16a97 7eb9f14 20d2574 7eb9f14 20d2574 7eb9f14 20d2574 7eb9f14 015e894 1a3195c 015e894 338a544 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 |
# Read the data
import pandas as pd

df = pd.read_csv('./Automobile_data.csv')
#df = df.drop(columns = ['normalized-losses','symboling'], axis = 1)


def _row_to_context(row):
    """Serialize one DataFrame row as 'column: value ' pairs for embedding.

    Args:
        row: a pandas Series (one row of ``df``); index holds column names.

    Returns:
        A single string, e.g. "make: toyota price: 9995 ".
    """
    return "".join(f"{col}: {row[col]} " for col in row.index)


# One context string per row, covering every column. The previous code
# hard-coded 26 columns (while its comment claimed 8), which raises
# IndexError as soon as the CSV schema changes; iterating row.index
# adapts to whatever columns the file actually has.
context_data = [_row_to_context(row) for _, row in df.iterrows()]
import os

# Pull the Groq API key from the environment (None when unset);
# os.getenv is the shorthand for os.environ.get.
groq_key = os.getenv('groq_API_Keys')
## LLM used for RAG
from langchain_groq import ChatGroq
# Groq-hosted Llama 3.1 70B is the answer generator for the RAG chain.
llm = ChatGroq(model="llama-3.1-70b-versatile",api_key=groq_key)
## Embedding model!
from langchain_huggingface import HuggingFaceEmbeddings
# Sentence-embedding model used for both indexing and query embedding.
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
# create vector store!
from langchain_chroma import Chroma
vectorstore = Chroma(
    collection_name="car_dataset_store",
    embedding_function=embed_model,
    # NOTE(review): persists Chroma data into the current working
    # directory — confirm this location is intended.
    persist_directory="./",
)
# add data to vector store
# NOTE(review): add_texts runs on every start; combined with a persisted
# store this likely inserts duplicate documents across runs — confirm
# and deduplicate if so.
vectorstore.add_texts(context_data)
retriever = vectorstore.as_retriever()
from langchain_core.prompts import PromptTemplate

# Prompt contract: answer only from the retrieved context, admit when the
# answer is unknown, and keep the raw context out of the reply.
template = """You are a car expert.
Use the provided context to answer the question.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.
Context: {context}
Question: {question}
Answer:"""

rag_prompt = PromptTemplate.from_template(template)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# LCEL pipeline: fan the incoming question into {context, question}
# (retriever fetches context; the question passes through untouched),
# fill the prompt, call the LLM, then reduce the reply to a plain string.
_chain_inputs = {"context": retriever, "question": RunnablePassthrough()}
rag_chain = _chain_inputs | rag_prompt | llm | StrOutputParser()
import gradio as gr


# Function for the chatbot response stream with error handling
def rag_memory_stream(message, history):
    """Stream the RAG chain's answer for the Gradio chat interface.

    Args:
        message: The user's latest chat message.
        history: Prior chat turns (required by gr.ChatInterface; unused).

    Yields:
        The accumulated answer text after each streamed chunk, or a single
        error string if the chain raises.
    """
    partial_text = ""
    try:
        for new_text in rag_chain.stream(message):
            # Accumulate chunks so the UI renders the growing answer.
            # (Removed a leftover per-chunk debug print that spammed stdout
            # on every streamed token.)
            partial_text += new_text
            yield partial_text
    except Exception as e:
        # UI boundary: surface the error to the user instead of crashing
        # the Gradio worker.
        yield f"An error occurred: {str(e)}"
# Function to process car preferences
def process_preferences(make, budget, fuel_type):
    """Summarize the user's selections and return a recommendation blurb.

    Args:
        make: Preferred manufacturer (e.g. "Toyota").
        budget: Budget figure, interpolated after a "$" sign.
        fuel_type: Fuel choice; lower-cased inside the recommendation text.

    Returns:
        A Markdown-formatted string echoing the choices plus a suggestion.
    """
    summary = [
        "You've selected:",
        f"- **Preferred Make**: {make}",
        f"- **Budget**: ${budget}",
        f"- **Fuel Type**: {fuel_type}",
        "",
        f"Based on your preferences, I recommend exploring the latest models of {make} "
        f"that fit your budget and offer {fuel_type.lower()} options!",
    ]
    return "\n".join(summary)
# Examples and app information shown in the chat tab header.
examples = ['I need a car', 'What is the make and fuel type of a car?']
description = "An advanced chatbot that helps you choose the right car based on your preferences and budget."
title = "Car Expert :) Let Me Help You Find the Perfect Ride!"

# Theme: Gradio base theme with blue/green hues, overridden to a
# sky-blue page background with black body text.
custom_theme = gr.themes.Base(
    primary_hue="blue",
    secondary_hue="green",
).set(
    body_background_fill="#87CEEB",  # sky-blue background
    body_text_color="#000000",       # black text
)
# Advanced Interface with Car Preferences: four-tab Gradio app
# (Chat / Car Preferences / Upload Documents / Help).
with gr.Blocks(theme=custom_theme) as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(description)
    with gr.Tabs():
        # Chat Tab: streaming RAG chatbot backed by rag_memory_stream.
        with gr.Tab("Chat"):
            chat_interface = gr.ChatInterface(
                fn=rag_memory_stream,
                type="messages",
                examples=examples,
                fill_height=True,
            )
        # Car Preferences Tab: small form whose submit button calls
        # process_preferences and writes the result into a textbox.
        with gr.Tab("Car Preferences"):
            gr.Markdown("### Provide your preferences to get tailored advice:")
            make = gr.Dropdown(
                choices=["Toyota", "Honda", "BMW", "Tesla", "Ford"],
                label="Preferred Make",
                info="Choose the car manufacturer you prefer.",
            )
            budget = gr.Slider(
                minimum=5000, maximum=100000, step=500,
                label="Budget (in USD)",
                info="Select your budget range.",
            )
            fuel_type = gr.Radio(
                choices=["Gasoline", "Diesel", "Electric", "Hybrid"],
                label="Fuel Type",
                info="Choose the type of fuel you prefer.",
            )
            submit_button = gr.Button("Submit Preferences")
            output = gr.Textbox(
                label="Recommendation",
                placeholder="Your recommendations will appear here...",
            )
            # Link the submit button to the processing function
            submit_button.click(
                process_preferences,  # Function to call
                inputs=[make, budget, fuel_type],  # Inputs from UI
                outputs=output,  # Where to display the result
            )
        # Upload Documents Tab
        # NOTE(review): file_upload is rendered but never wired to any
        # handler — uploaded files are currently ignored. Confirm intent.
        with gr.Tab("Upload Documents"):
            gr.Markdown("### Upload any related documents for personalized suggestions:")
            file_upload = gr.File(label="Upload Car Listings or Preferences")
        # Help Tab: static usage instructions, contact info, and about text.
        with gr.Tab("Help"):
            gr.Markdown("### Need Assistance?")
            gr.Markdown(
                """
- Use the **Chat** tab to ask questions about cars.
- Fill in your **Car Preferences** for tailored recommendations.
- Upload files in the **Upload Documents** tab.
- Contact support at: support@carexpert.com
"""
            )
            gr.Markdown("### About")
            gr.Markdown(
                """
This chatbot is powered by LangChain and Groq API for real-time AI interactions.
Designed to provide personalized car-buying assistance!
"""
            )

# Launch the app only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()
|