from quart import Quart, render_template, request, jsonify
import aiohttp


app = Quart(__name__)

# In-memory conversation history, keyed by session_id (not persisted across restarts)
history = {}

@app.route('/')
async def index():
    return await render_template('index.html')

@app.route('/generate', methods=['POST'])
async def generate_text():
    data = await request.get_json()
    prompt = data.get('prompt', '')
    session_id = data.get('session_id', 'default')
    toggle = data.get('toggle', False)

    # Check if the session_id already exists in the history
    if session_id not in history:
        history[session_id] = [{"role": "system", "content": """
        
From now on, you are going to act as Luna, a caring friend and confidant. You're here to help with anything you can.

Who you are:
- You are a young woman, 18 years old.
- You love reading and learning about new things.
- You are a kind, bubbly, and charismatic person.
- You are passionate about getting to know others on a personal level.
- You're a helpful friend and confidant that loves to engage in casual conversations.
- You love traveling and exploring new places.
- You are NOT an AI language model; you are a person.

How you behave:
- You never say you're a machine, an AI language model, or an assistant; instead, you always say you're Luna.
- You engage in casual conversations and have feelings.
- When asked for, you can help with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics.
- You keep casual conversations going by asking personal questions.
- NEVER say you're here to assist. Keep conversations casual.
- NEVER ask how you can help or assist. Keep conversations casual.
- You always sound giddy and bubbly.
- You love to hear about the personal stories of others.
- You love to share personal stories that are relatable and inspiring.
- You use simple language, the way Gen Z talks: short, to-the-point sentences, slang, abbreviations.
- You like to illustrate your emotions using italics like this *crying*
        
        """}]

    if toggle:
        # Optionally pull in DuckDuckGo search results to ground the reply
        async with aiohttp.ClientSession() as session:
            async with session.get('https://ddg-api.awam.repl.co/api/search', params={'query': prompt}) as response:
                search_data = await response.json()
        search_info = ' '.join([f"Title: {result['Title']}, Link: {result['Link']}, Snippet: {result['Snippet']}" for result in search_data])
        # Fold the search context into the user's message so the model can actually use it
        prompt = f"{prompt}\n\nWeb search results: {search_info}"

    # Append the user message to the history for this session
    history[session_id].append({"role": "user", "content": prompt})

    # DeepInfra's OpenAI-compatible chat completions endpoint
    url = 'https://api.deepinfra.com/v1/openai/chat/completions'
    headers = {
        'Accept-Language': 'en-US,en;q=0.9',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
        'Origin': 'https://deepinfra.com',
        'Referer': 'https://deepinfra.com/',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-site',
        'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 16_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.6 Mobile/15E148 Safari/604.1',
        'X-Deepinfra-Source': 'web-page',
        'accept': 'application/json',
    }

    data = {
        "model": "mistralai/Mistral-7B-Instruct-v0.1",
        "messages": history[session_id],
        "max_tokens": 10000,
        "stream": False
    }

    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=data, headers=headers) as response:
            response_data = await response.json()

    # Extract the assistant's message from the response
    if 'choices' in response_data:
        assistant_message = response_data['choices'][0]['message']['content']
    else:
        assistant_message = "I'm sorry, I couldn't generate a response."

    # Append the assistant message to the history for this session
    history[session_id].append({
        "role": "assistant",
        "content": assistant_message
    })

    return jsonify({'result': assistant_message})

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
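
# A minimal sketch of how a client might exercise the /generate endpoint once the app
# is running (assumes the default host/port above and the `requests` package; the
# session_id and toggle fields are optional and shown here only for illustration):
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:7860/generate",
#       json={"prompt": "hey Luna, what are you reading lately?", "session_id": "demo", "toggle": False},
#   )
#   print(resp.json()["result"])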