import gradio as gr
import requests
import json
from decouple import Config, RepositoryEnv

# Load credentials from the local .env file
config = Config(RepositoryEnv('.env'))
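
# For illustration only: the .env file is assumed to contain the three values
# read below (placeholder names/values, not real credentials):
#
#   CUSTOMER_ID=1234567890
#   CORPUS_ID=1
#   API_KEY=your-api-key-here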

def query_vectara(question):
    # The question entered in the Gradio textbox
    user_message = question

    # Query Vectara API
    customer_id = config('CUSTOMER_ID')  # Read from .env file
    corpus_id = config('CORPUS_ID')  # Read from .env file
    api_key = config('API_KEY')  # Read from .env file

    query_url = "https://api.vectara.io/v1/query"

    headers = {
        "Content-Type": "application/json",
        "x-api-key": api_key,  # Vectara API keys are passed in the x-api-key header
        "customer-id": customer_id,
    }

    query_body = {
        "query": [
            {
                "query": user_message,
                "queryContext": "",
                "start": 0,
                "numResults": 10,
                "contextConfig": {
                    "charsBefore": 0,
                    "charsAfter": 0,
                    "sentencesBefore": 2,
                    "sentencesAfter": 2,
                    "startTag": "%START_SNIPPET%",
                    "endTag": "%END_SNIPPET%",
                },
                "rerankingConfig": {
                    "rerankerId": 272725718,
                    "mmrConfig": {
                        "diversityBias": 0.3
                    }
                },
                "corpusKey": [
                    {
                        "customerId": customer_id,
                        "corpusId": corpus_id,
                        "semantics": 0,
                        "metadataFilter": "",
                        "lexicalInterpolationConfig": {
                            "lambda": 0
                        },
                        "dim": []
                    }
                ],
                "summary": [
                    {
                        "maxSummarizedResults": 5,
                        "responseLang": "eng",
                        "summarizerPromptName": "vectara-summary-ext-v1.2.0"
                    }
                ]
            }
        ]
    }

    query_response = requests.post(query_url, json=query_body, headers=headers, timeout=60)

    if query_response.status_code == 200:
        query_data = query_response.json()
        response_message = f"Response from Vectara API: {json.dumps(query_data, indent=2)}"
    else:
        response_message = f"Error {query_response.status_code}: {query_response.text}"

    return response_message
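
# Minimal sketch (not wired into the interface below): instead of dumping the
# raw JSON, one could return just the generated summary. The field names here
# follow the shape of a typical v1 /query response (responseSet -> summary ->
# text) and should be verified against the payload actually returned for your
# corpus.
def extract_summary(query_data):
    try:
        return query_data["responseSet"][0]["summary"][0]["text"]
    except (KeyError, IndexError):
        # Fall back to the full payload if the expected fields are missing
        return json.dumps(query_data, indent=2)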

# Create a simple Gradio interface with a single text input and text output
iface = gr.Interface(
    fn=query_vectara,
    inputs=[gr.Textbox(label="Input Text")],
    outputs=gr.Textbox(label="Output Text"),
    title="Vectara Chatbot",
    description="Ask me anything using the Vectara API!"
)

iface.launch()
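
# Assuming this file is saved as app.py, running `python app.py` starts the
# Gradio app locally (by default at http://127.0.0.1:7860).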