import os
import gradio as gr
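# Application instrumentation (assumed to be a module shipped alongside this file);
# initialised before the LangChain imports below.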
import instrumentation

instrumentation.init("llm-chat-app")

from langchain.globals import set_debug
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_openai import ChatOpenAI

# Verbose LangChain tracing and standard-library debug logging.
set_debug(True)

import logging

logging.basicConfig(level=logging.DEBUG)

# ChatOpenAI reads the OPENAI_API_KEY environment variable for authentication.
llm = ChatOpenAI(temperature=0.5, max_tokens=100, model="gpt-3.5-turbo")

output_parser = StrOutputParser()

prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a Responsible AI assistant to the user."),
    ("user", "{input}"),
])

def handle_message(message, _history):
    """Handle one chat turn: run the user message through prompt | llm | output_parser."""
    chain = prompt | llm | output_parser
    return chain.invoke({"input": message})

# Bind address and port are configurable through environment variables.
server_name = os.environ.get("GR_SERVER_NAME", "127.0.0.1")
server_port = int(os.environ.get("GR_SERVER_PORT", "7860"))

# Extra keyword arguments for gr.ChatInterface, if any.
options = {}

gr.ChatInterface(handle_message, **options).launch(
    server_name=server_name,
    server_port=server_port,
)