# Read the data
import pandas as pd

df = pd.read_csv('./Automobile_data.csv')
#df = df.drop(columns = ['normalized-losses','symboling'], axis = 1)

# Build one text snippet per row: "column: value column: value ..."
context_data = []
for i in range(len(df)):  # Loop over rows
    context = ""
    for j in range(26):  # Loop over all 26 columns
        context += df.columns[j]  # Add column name
        context += ": "
        context += str(df.iloc[i, j])  # Convert value to string
        context += " "
    context_data.append(context)
import os
# Get the secret key from the environment
groq_key = os.environ.get('groq_API_Keys')
## LLM used for RAG
from langchain_groq import ChatGroq
llm = ChatGroq(model="llama-3.1-70b-versatile",api_key=groq_key)
## Embedding model!
from langchain_huggingface import HuggingFaceEmbeddings
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
# create vector store!
from langchain_chroma import Chroma
vectorstore = Chroma(
    collection_name="car_dataset_store",
    embedding_function=embed_model,
    persist_directory="./",
)
# Add data to the vector store
vectorstore.add_texts(context_data)
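
# Optional sanity check (a sketch, not part of the app flow): retrieve the closest
# rows for an ad-hoc query to confirm the store is populated. The query string is
# only an illustration.
# nearest = vectorstore.similarity_search("diesel sedan with good mileage", k=3)
# print(nearest[0].page_content)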
retriever = vectorstore.as_retriever()
from langchain_core.prompts import PromptTemplate
template = ("""You are a car expert.
Use the provided context to answer the question.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.
Context: {context}
Question: {question}
Answer:""")
rag_prompt = PromptTemplate.from_template(template)
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)
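
# Quick check (a sketch with an example question, not used by the app): the
# composed chain can also be invoked synchronously instead of streamed.
# print(rag_chain.invoke("Which fuel types appear in the dataset?"))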
import gradio as gr
# Function to process streaming responses
def rag_memory_stream(message, history):
    partial_text = ""
    for new_text in rag_chain.stream(message):
        partial_text += new_text
        yield partial_text
# Examples and app information
examples = ['I need a car', 'What is the make and fuel type of a car?']
description = "An advanced chatbot that helps you choose the right car based on your preferences and budget."
title = "Car Expert :) Let Me Help You Find the Perfect Ride!"
# Custom theme: dark background with white text and blue/green accents
custom_theme = gr.themes.Base(primary_hue="blue", secondary_hue="green").set(
    body_background_fill="#000000",  # Black background
    body_text_color="#FFFFFF",       # White text for contrast
)
# Additional UI Components
with gr.Blocks(theme=custom_theme) as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(description)

    with gr.Tabs():
        with gr.Tab("Chat"):
            chat_interface = gr.ChatInterface(
                fn=rag_memory_stream,
                type="messages",
                examples=examples,
                fill_height=True,
            )

        with gr.Tab("Car Preferences"):
            gr.Markdown("### Provide your preferences to get tailored advice:")
            make = gr.Dropdown(
                choices=["Toyota", "Honda", "BMW", "Tesla", "Ford"],
                label="Preferred Make",
                info="Choose the car manufacturer you prefer.",
            )
            budget = gr.Slider(
                minimum=5000, maximum=100000, step=500,
                label="Budget (in USD)",
                info="Select your budget range.",
            )
            fuel_type = gr.Radio(
                choices=["Gasoline", "Diesel", "Electric", "Hybrid"],
                label="Fuel Type",
                info="Choose the type of fuel you prefer.",
            )
            submit_button = gr.Button("Submit Preferences")

        with gr.Tab("Upload Documents"):
            gr.Markdown("### Upload any related documents for personalized suggestions:")
            file_upload = gr.File(label="Upload Car Listings or Preferences")

        with gr.Tab("Help"):
            gr.Markdown("### Need Assistance?")
            gr.Markdown(
                """
- Use the **Chat** tab to ask questions about cars.
- Fill in your **Car Preferences** for tailored recommendations.
- Upload files in the **Upload Documents** tab.
- Contact support at: [email protected]
"""
            )

    gr.Markdown("### About")
    gr.Markdown(
        """
This chatbot is powered by LangChain and Groq API for real-time AI interactions.
Designed to provide personalized car-buying assistance!
"""
    )
# Launch the app
if __name__ == "__main__":
    demo.launch()