import os
from typing import List

import gradio as gr
import requests
from duckduckgo_search import DDGS
from pydantic import BaseModel, Field

# Hugging Face API token read from the environment
huggingface_token = os.environ.get("HUGGINGFACE_TOKEN")
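
# Assumed runtime dependencies (usual PyPI package names; versions not pinned here):
#   pip install gradio requests duckduckgo_search pydantic
# A valid Hugging Face API token must be exported as HUGGINGFACE_TOKEN before launch.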

# Perform a DuckDuckGo text search; each result is a dict with
# 'title', 'href', and 'body' keys
def duckduckgo_search(query):
    with DDGS() as ddgs:
        results = ddgs.text(query, max_results=5)
    return results

# Pydantic model for structured citation output (defined for clarity; not
# enforced anywhere in the current request flow)
class CitingSources(BaseModel):
    sources: List[str] = Field(
        ...,
        description="List of sources to cite. Each entry should be the URL of the source."
    )

def get_response_with_search(query):
    # Perform the web search
    search_results = duckduckgo_search(query)
    
    # Use the search results as context for the model
    context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n" 
                        for result in search_results if 'body' in result)
    
    # Prompt formatted for Mistral-7B-Instruct
    prompt = f"""<s>[INST] Using the following context:
{context}
Write a detailed and complete research document that fulfills the following user request: '{query}'
After writing the document, please provide a list of sources used in your response. [/INST]"""
    
    # API endpoint for Mistral-7B-Instruct-v0.3
    API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
    
    # Headers
    headers = {"Authorization": f"Bearer {huggingface_token}"}
    
    # Payload: prompt plus generation parameters for the text-generation endpoint
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 1000,    # cap on the length of the generated answer
            "temperature": 0.7,        # sampling randomness
            "top_p": 0.95,             # nucleus sampling threshold
            "top_k": 40,               # sample only from the 40 most likely tokens
            "repetition_penalty": 1.1  # discourage repeated phrases
        }
    }
    
    # Make the API call
    response = requests.post(API_URL, headers=headers, json=payload)
    
    if response.status_code == 200:
        result = response.json()
        if isinstance(result, list) and len(result) > 0:
            generated_text = result[0].get('generated_text', 'No text generated')
            
            # The API may echo the prompt; strip everything up to and including [/INST]
            content_start = generated_text.find("[/INST]")
            if content_start != -1:
                generated_text = generated_text[content_start + len("[/INST]"):].strip()
            
            # Split the response into main content and sources
            parts = generated_text.split("Sources:", 1)
            main_content = parts[0].strip()
            sources = parts[1].strip() if len(parts) > 1 else ""
            
            return main_content, sources
        else:
            return f"Unexpected response format: {result}", ""
    else:
        return f"Error: API returned status code {response.status_code}", ""

def gradio_interface(query):
    main_content, sources = get_response_with_search(query)
    formatted_response = f"{main_content}\n\nSources:\n{sources}"
    return formatted_response

# Gradio interface
iface = gr.Interface(
    fn=gradio_interface,
    inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
    outputs="text",
    title="AI-powered Web Search Assistant",
    description="Ask a question, and I'll search the web and provide an answer using the Mistral-7B-Instruct model.",
    examples=[
        ["Latest news about Yann LeCun"],
        ["Latest news site:github.blog"],
        ["Where I can find best hotel in Galapagos, Ecuador intitle:hotel"],
        ["filetype:pdf intitle:python"]
    ]
)

if __name__ == "__main__":
    iface.launch()
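
# To try the app locally (assuming this file is saved as app.py and the
# dependencies listed above are installed):
#   HUGGINGFACE_TOKEN=<your-token> python app.py
# Gradio prints a local URL; pass share=True to iface.launch() for a public link.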