from gradio_huggingfacehub_search import HuggingfaceHubSearch
from huggingface_hub import HfApi
import pandas as pd
import gradio as gr
import duckdb
import requests
import llama_cpp
import instructor

from pydantic import BaseModel

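# The Hub's datasets-server API lists the parquet files backing a dataset;
# the first shard is queried through a DuckDB view registered under this name.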
BASE_DATASETS_SERVER_URL = "https://datasets-server.huggingface.co"
view_name = "dataset_view"

hf_api = HfApi()
conn = duckdb.connect()

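# Load the quantized Hermes-2-Pro-Llama-3-8B GGUF locally with llama.cpp;
# n_gpu_layers=-1 offloads all layers to the GPU and chat_format="chatml"
# matches the model's prompt template.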
llama = llama_cpp.Llama(
    model_path="Hermes-2-Pro-Llama-3-8B-Q8_0.gguf",
    n_gpu_layers=-1,
    chat_format="chatml",
    n_ctx=2048,
    verbose=False,
)

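# Patch llama.cpp's OpenAI-compatible chat completion with instructor so the
# model's output is constrained to and validated against a Pydantic schema.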
create = instructor.patch(
    create=llama.create_chat_completion_openai_v1,
    mode=instructor.Mode.JSON_SCHEMA,
)


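# Structured output schema: the model must return a single SQL string.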
class SQLResponse(BaseModel):
    sql: str


def get_dataset_ddl(dataset_id: str) -> str:
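    """Register the dataset's first parquet file as a DuckDB view and return a CREATE TABLE DDL for it."""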
    response = requests.get(f"{BASE_DATASETS_SERVER_URL}/parquet?dataset={dataset_id}")
    response.raise_for_status()  # Check if the request was successful

    parquet_files = response.json().get("parquet_files", [])
    if not parquet_files:
        raise ValueError(f"No parquet files found for dataset '{dataset_id}'.")

    first_parquet_url = parquet_files[0].get("url")
    if not first_parquet_url:
        raise ValueError("No valid URL found for the first parquet file.")

    conn.execute(
        f"CREATE OR REPLACE VIEW {view_name} as SELECT * FROM read_parquet('{first_parquet_url}');"
    )
    dataset_ddl = conn.execute(f"PRAGMA table_info('{view_name}');").fetchall()

    column_data_types = ",\n\t".join(
        [f"{column[1]} {column[2]}" for column in dataset_ddl]
    )

    sql_ddl = """
CREATE TABLE {} (
    {}
);
    """.format(
        view_name, column_data_types
    )

    return sql_ddl


def generate_sql(dataset_id: str, query: str) -> str:
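    """Ask the LLM to translate a natural-language question into a DuckDB SQL query over the dataset view."""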
    ddl = get_dataset_ddl(dataset_id)

    system_prompt = f"""
    You are an expert SQL assistant with access to the following DuckDB Table:
    
    ```sql
    {ddl}
    ```
    
    Please assist the user by writing a SQL query that answers the user's question.
    """

    resp: SQLResponse = create(
        model="Hermes-2-Pro-Llama-3-8B",
        messages=[
            {"role": "system", "content": system_prompt},
            {
                "role": "user",
                "content": query,
            },
        ],
        response_model=SQLResponse,
    )

    return resp.sql


def query_dataset(dataset_id: str, query: str) -> tuple[pd.DataFrame, str]:
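    """Generate SQL for the question, run it against the DuckDB view, and return the results plus the query."""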
    sql_query = generate_sql(dataset_id, query)
    df = conn.execute(sql_query).fetchdf()

    markdown_output = f"```sql\n{sql_query}\n```"
    return df, markdown_output


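# Gradio UI: pick a Hub dataset, ask a question, and display the resulting DataFrame and generated SQL.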
with gr.Blocks() as demo:
    gr.Markdown("# Query your HF Datasets with Natural Language πŸ“ˆπŸ“Š")
    dataset_id = HuggingfaceHubSearch(
        label="Hub Dataset ID",
        placeholder="Find your favorite dataset...",
        search_type="dataset",
        value="jamescalam/world-cities-geo",
    )
    user_query = gr.Textbox("", label="Ask anything...")

    btn = gr.Button("Ask πŸͺ„")

    df = gr.DataFrame()
    sql_query = gr.Markdown(label="Output SQL Query")

    btn.click(
        query_dataset,
        inputs=[dataset_id, user_query],
        outputs=[df, sql_query],
    )


if __name__ == "__main__":
    demo.launch()