import os
import gradio as gr
import sqlparse
import requests
from time import sleep

def format_sql(text):
    # Take the part of the model output after the first "|", which should be the final query
    try:
        final_query = text.split("|")[1].strip()
    except Exception:
        final_query = text

    try:
        # Attempt to format SQL query using sqlparse
        formatted_query = sqlparse.format(final_query, reindent=True, keyword_case='upper')
    except Exception:
        # If formatting fails, use the original, unformatted query
        formatted_query = final_query

    # Return the formatted query; the gr.Code output component renders it directly
    return formatted_query
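
# For illustration (a rough example, not part of the app): with reindent=True and
# keyword_case='upper', sqlparse.format turns
#   "select name from singer where age > 30"
# into something like:
#   SELECT name
#   FROM singer
#   WHERE age > 30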


def bot(input_message: str, db_info="", temperature=0.3, top_p=0.9, top_k=0, repetition_penalty=1.08):
    # Format the user's input message
    messages = f"Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\n\nConvert text to sql: {input_message} {db_info}\n\n### Response:\n\n"

    url = "https://e9f4be879d38-8269039109365193683.ngrok-free.app/api/v1/generate"
    payload = {
        "prompt": messages,
        "temperature": temperature,
        "top_p": top_p,
        "top_k": top_k,
        "top_a": 0,
        "n": 1,
        "max_context_length": 2048,
        "max_length": 512,
        "rep_pen": repetition_penalty,
        "sampler_order": [6,0,1,3,4,2,5],
        "stop_sequence": ["###", "Result"],
    }
    headers = {
        "Content-Type": "application/json",
        "ngrok-skip-browser-warning": "1"  # added this line
    }

    for _ in range(3):
        try:
            response = requests.post(url, json=payload, headers=headers)
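            # The endpoint is expected to return JSON shaped like {"results": [{"text": "..."}]},
            # which the next line unpacks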
            response_text = response.json()["results"][0]["text"]
            response_text = response_text.replace("\n", "").replace("\t", " ")
            if response_text and response_text[-1] == ".":
                response_text = response_text[:-1]

            return format_sql(response_text)
            
        except Exception as e:
            print(f'Error occurred: {str(e)}')
            print('Waiting for 10 seconds before retrying...')
            sleep(10)

    # All retries failed: return an error string instead of None so the output box shows something useful
    return "-- Error: could not reach the generation API after 3 attempts"

with gr.Blocks(theme='gradio/soft') as demo:
    header = gr.HTML("""
        <h1 style="text-align: center">SQL Skeleton WizardCoder Demo</h1>
        <h3 style="text-align: center">🧙‍♂️ Generate SQL queries from Natural Language 🧙‍♂️</h3>
    """)

    output_box = gr.Code(label="Generated SQL", lines=2, interactive=True)
    input_text = gr.Textbox(lines=3, placeholder='Write your question here...', label='NL Input')
    db_info = gr.Textbox(lines=4, placeholder='Example: | table_01 : column_01 , column_02 | table_02 : column_01 , column_02 | ...', label='Database Info')

    with gr.Accordion("Hyperparameters", open=False):
        temperature = gr.Slider(label="Temperature", minimum=0.0, maximum=1.0, value=0.3, step=0.1)
        top_p = gr.Slider(label="Top-p (nucleus sampling)", minimum=0.0, maximum=1.0, value=0.9, step=0.01)
        top_k = gr.Slider(label="Top-k", minimum=0, maximum=200, value=0, step=1)
        repetition_penalty = gr.Slider(label="Repetition Penalty", minimum=1.0, maximum=2.0, value=1.08, step=0.01)
        
    run_button = gr.Button("Generate SQL", variant="primary")
    
    with gr.Accordion("Examples", open=True):
        examples = gr.Examples([
            ["What is the average, minimum, and maximum age of all singers from France?", "| stadium : stadium_id , location , name , capacity , highest , lowest , average | singer : singer_id , name , country , song_name , song_release_year , age , is_male | concert : concert_id , concert_name , theme , stadium_id , year | singer_in_concert : concert_id , singer_id | concert.stadium_id = stadium.stadium_id | singer_in_concert.singer_id = singer.singer_id | singer_in_concert.concert_id = concert.concert_id |"],
            ["Show location and name for all stadiums with a capacity between 5000 and 10000.", "| stadium : stadium_id , location , name , capacity , highest , lowest , average | singer : singer_id , name , country , song_name , song_release_year , age , is_male | concert : concert_id , concert_name , theme , stadium_id , year | singer_in_concert : concert_id , singer_id | concert.stadium_id = stadium.stadium_id | singer_in_concert.singer_id = singer.singer_id | singer_in_concert.concert_id = concert.concert_id |"],
            ["What are the number of concerts that occurred in the stadium with the largest capacity ?", "| stadium : stadium_id , location , name , capacity , highest , lowest , average | singer : singer_id , name , country , song_name , song_release_year , age , is_male | concert : concert_id , concert_name , theme , stadium_id , year | singer_in_concert : concert_id , singer_id | concert.stadium_id = stadium.stadium_id | singer_in_concert.singer_id = singer.singer_id | singer_in_concert.concert_id = concert.concert_id |"],
            ["How many male singers performed in concerts in the year 2023?", "| stadium : stadium_id , location , name , capacity , highest , lowest , average | singer : singer_id , name , country , song_name , song_release_year , age , is_male | concert : concert_id , concert_name , theme , stadium_id , year | singer_in_concert : concert_id , singer_id | concert.stadium_id = stadium.stadium_id | singer_in_concert.singer_id = singer.singer_id | singer_in_concert.concert_id = concert.concert_id |"],
            ["List the names of all singers who performed in a concert with the theme 'Rock'", "| stadium : stadium_id , location , name , capacity , highest , lowest , average | singer : singer_id , name , country , song_name , song_release_year , age , is_male | concert : concert_id , concert_name , theme , stadium_id , year | singer_in_concert : concert_id , singer_id | concert.stadium_id = stadium.stadium_id | singer_in_concert.singer_id = singer.singer_id | singer_in_concert.concert_id = concert.concert_id |"]
        ], inputs=[input_text, db_info, temperature, top_p, top_k, repetition_penalty], fn=bot, cache_examples=True, outputs=output_box)

    quantized_model = "richardr1126/spider-skeleton-wizard-coder-ggml"
    merged_model = "richardr1126/spider-skeleton-wizard-coder-merged"
    initial_model = "WizardLM/WizardCoder-15B-V1.0"
    lora_model = "richardr1126/spider-skeleton-wizard-coder-qlora"
    dataset = "richardr1126/spider-skeleton-context-instruct"
    
    footer = gr.HTML(f"""
        <p>🛠️ If you want, you can <strong>duplicate this Space</strong>, then change the HF_MODEL_REPO Spaces env variable to use any GGML model.</p>
        <p>🌐 Leveraging the <a href='https://huggingface.co/{quantized_model}'><strong>4-bit GGML version</strong></a> of the <a href='https://huggingface.co/{merged_model}'><strong>{merged_model}</strong></a> model.</p>
        <p>🔗 How it's made: <a href='https://huggingface.co/{initial_model}'><strong>{initial_model}</strong></a> was fine-tuned to create <a href='https://huggingface.co/{lora_model}'><strong>{lora_model}</strong></a>, and the adapter was then merged back into the base model to create <a href='https://huggingface.co/{merged_model}'><strong>{merged_model}</strong></a>.</p>
        <p>📉 Fine-tuning was performed with QLoRA on the <a href='https://huggingface.co/datasets/{dataset}'><strong>{dataset}</strong></a> dataset. You can view training metrics in the <a href='https://huggingface.co/{lora_model}'><strong>QLoRA adapter HF repo</strong></a>.</p>
    """)


    run_button.click(fn=bot, inputs=[input_text, db_info, temperature, top_p, top_k, repetition_penalty], outputs=output_box, api_name="txt2sql")

demo.queue(concurrency_count=1, max_size=10).launch(debug=True)
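
# Example client call (a sketch, not part of the app): once this Space is running,
# the "txt2sql" endpoint exposed above can be called with gradio_client. The URL
# below is a placeholder; replace it with the actual Space URL.
#
#   from gradio_client import Client
#
#   client = Client("https://<your-space-url>")   # placeholder URL (assumption)
#   sql = client.predict(
#       "How many singers are there?",            # input_text
#       "| singer : singer_id , name , age |",    # db_info
#       0.3,    # temperature
#       0.9,    # top_p
#       0,      # top_k
#       1.08,   # repetition_penalty
#       api_name="/txt2sql",
#   )
#   print(sql)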