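"""Materials Science chatbot built with Gradio and the Groq chat-completions API.

The app answers materials-science questions, extracts the markdown comparison
table from the model's reply, and exposes it as CSV and PDF downloads.
Requires the GROQ_API_KEY environment variable plus the gradio, pandas,
requests and fpdf packages; run this script with Python to launch the UI.
"""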
import os
import re
import requests
import gradio as gr
import pandas as pd
import tempfile
from fpdf import FPDF

# API setup
groq_api_key = os.getenv("GROQ_API_KEY")
if not groq_api_key:
    raise ValueError("GROQ_API_KEY is missing!")

url = "https://api.groq.com/openai/v1/chat/completions"
headers = {"Authorization": f"Bearer {groq_api_key}"}

# Globals to hold table for export
comparison_table = None

# Function to extract markdown table and convert to dataframe
def extract_table(md):
    global comparison_table
    lines = [line.strip() for line in md.splitlines() if "|" in line and "---" not in line]
    if not lines:
        # No markdown table in the reply; nothing to export.
        comparison_table = None
        return None
    cols = [x.strip() for x in lines[0].split("|")[1:-1]]
    data = []
    for row in lines[1:]:
        values = [x.strip() for x in row.split("|")[1:-1]]
        # Skip malformed rows that do not match the header width.
        if len(values) == len(cols):
            data.append(values)
    df = pd.DataFrame(data, columns=cols)
    comparison_table = df
    return df

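# The system prompt below asks the model for a table with materials as columns
# and properties as rows, so a typical reply contains something like:
#
#   | Property          | Material A | Material B | Material C |
#   |-------------------|------------|------------|------------|
#   | Tensile strength  | ...        | ...        | ...        |
#   | Thermal stability | ...        | ...        | ...        |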
# Main chatbot function
def chat_with_groq(user_input):
    # Topic gate: only answer materials-science questions. Keywords are
    # matched against the lower-cased input; "ai" and "ml" are matched as
    # whole words to avoid false positives inside longer words.
    keywords = ["material", "materials", "alloy", "composite", "polymer", "ceramic",
                "application", "mechanical", "thermal", "corrosion", "creep", "fatigue",
                "strength", "tensile", "impact", "fracture", "modulus", "machine learning"]
    lowered = user_input.lower()
    words = set(re.findall(r"[a-z]+", lowered))

    if not (any(word in lowered for word in keywords) or {"ai", "ml"} & words):
        return "⚠️ I am an expert in Materials Science. Ask me anything about it and I’ll try my best. For other topics, try ChatGPT! 🙂"

    system_prompt = (
        "You are a materials science expert. When a user asks about materials for an application, provide:\n"
        "1. Required properties.\n"
        "2. A markdown table comparing the top 3 materials (rows: properties, columns: materials).\n"
        "3. A short summary of use cases.\n"
        "Only reply with markdown content."
    )

    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_input}
        ]
    }

    response = requests.post(url, headers=headers, json=body)

    if response.status_code == 200:
        content = response.json()['choices'][0]['message']['content']
        extract_table(content)
        return content
    else:
        return f"Error {response.status_code}: {response.text}"

# File export functions. Gradio's File component expects a file path, so the
# exports are written into the system temp directory and returned as paths.
def download_csv():
    if comparison_table is None:
        return None
    path = os.path.join(tempfile.gettempdir(), "materials.csv")
    comparison_table.to_csv(path, index=False)
    return path

def download_pdf():
    if comparison_table is None:
        return None
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", size=10)
    col_width = pdf.w / (len(comparison_table.columns) + 1)
    row_height = 8
    # Header row.
    for col in comparison_table.columns:
        pdf.cell(col_width, row_height, col, border=1)
    pdf.ln()
    # One row per table entry.
    for i in range(len(comparison_table)):
        for item in comparison_table.iloc[i]:
            pdf.cell(col_width, row_height, str(item), border=1)
        pdf.ln()
    path = os.path.join(tempfile.gettempdir(), "materials.pdf")
    pdf.output(path)
    return path

# Build UI
with gr.Blocks(title="Materials Science Chatbot", css="""
    #orange-btn {
        background-color: #f97316 !important;
        color: white !important;
        border: none;
        font-weight: bold;
    }
""") as demo:
    gr.Markdown("## 🧪 Materials Science Expert\nAsk about materials for any application or property requirements.")

    with gr.Row():
        with gr.Column(scale=3):
            user_input = gr.Textbox(
                label="Ask your question",
                placeholder="e.g. Best materials for heat shields...",
                lines=2,
                elem_id="question_box"
            )
            gr.Markdown("💡 *Hit Enter to submit your query*")
        with gr.Column(scale=1, min_width=100):
            submit_btn = gr.Button("Submit", elem_id="orange-btn")

    # Popular questions section
    gr.Markdown("#### 📌 Popular Materials Science related questions")
    popular_questions = [
        "What are the best corrosion-resistant materials for marine environments (e.g., desalination)?",
        "Which materials are ideal for solar panel coatings and desert heat management?",
        "What materials are used for aerospace structures in extreme climates?",
        "Best high-strength materials for construction in the Gulf region?",
        "What advanced materials are used in electric vehicles and batteries in the UAE?",
        "How can one leverage AI/ML techniques in Materials Science?",
        "I’m a recent high school graduate interested in science. How can I explore Materials Science with AI/ML?"
    ]

    def autofill(question):
        # gr.update works across Gradio versions (Textbox.update was removed in Gradio 4).
        return gr.update(value=question)

    with gr.Row():
        for q in popular_questions:
            # Bind each button to its own question text via a default argument;
            # otherwise autofill would be called with no argument at all.
            gr.Button(q, size="sm").click(lambda q=q: autofill(q), inputs=[], outputs=user_input)

    # Output
    output_md = gr.Markdown()

    with gr.Row():
        with gr.Column():
            csv_btn = gr.File(label="Download CSV", visible=False)
            pdf_btn = gr.File(label="Download PDF", visible=False)

    def submit_and_prepare(user_input):
        response = chat_with_groq(user_input)
        csv_path = download_csv()
        pdf_path = download_pdf()
        # Only reveal the download widgets when a comparison table was extracted.
        return (
            response,
            gr.update(value=csv_path, visible=csv_path is not None),
            gr.update(value=pdf_path, visible=pdf_path is not None),
        )

    submit_btn.click(submit_and_prepare, inputs=user_input, outputs=[output_md, csv_btn, pdf_btn])
    user_input.submit(submit_and_prepare, inputs=user_input, outputs=[output_md, csv_btn, pdf_btn])

# Launch
if __name__ == "__main__":
    demo.launch()