import React, { useState, useEffect, useRef } from "react";
import ReactMarkdown from "react-markdown";
import {
  Card,
  Title,
  Table,
  TableHead,
  TableRow,
  TableCell,
  TableBody,
  Grid,
  Tab,
  TabGroup,
  TabList,
  TabPanel,
  TabPanels,
  Metric,
  Col,
  Text,
  SelectItem,
  TextInput,
  Button,
} from "@tremor/react";
import { message, Select } from "antd";
import { modelAvailableCall } from "./networking";
import openai from "openai";
import { ChatCompletionMessageParam } from "openai/resources/chat/completions";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Typography } from "antd";
import { coy } from "react-syntax-highlighter/dist/esm/styles/prism";

interface ChatUIProps {
  accessToken: string | null;
  token: string | null;
  userRole: string | null;
  userID: string | null;
  disabledPersonalKeyCreation: boolean;
}

async function generateModelResponse(
  chatHistory: { role: string; content: string }[],
  updateUI: (chunk: string, model: string) => void,
  selectedModel: string,
  accessToken: string
) {
  // base url should be the current base_url
  const isLocal = process.env.NODE_ENV === "development";
  if (isLocal !== true) {
    console.log = function () {};
  }
  console.log("isLocal:", isLocal);
  const proxyBaseUrl = isLocal
    ? "http://localhost:4000"
    : window.location.origin;

  const client = new openai.OpenAI({
    apiKey: accessToken, // temporary LiteLLM proxy key for this UI session
    baseURL: proxyBaseUrl, // route requests through the LiteLLM proxy
    dangerouslyAllowBrowser: true,
  });

  try {
    // Stream the completion and forward each content delta to the UI
    const response = await client.chat.completions.create({
      model: selectedModel,
      stream: true,
      messages: chatHistory as ChatCompletionMessageParam[],
    });

    for await (const chunk of response) {
      console.log(chunk);
      if (chunk.choices[0].delta.content) {
        updateUI(chunk.choices[0].delta.content, chunk.model);
      }
    }
  } catch (error) {
    message.error(
      `Error occurred while generating model response. Please try again. Error: ${error}`,
      20
    );
  }
}

const ChatUI: React.FC<ChatUIProps> = ({
  accessToken,
  token,
  userRole,
  userID,
  disabledPersonalKeyCreation,
}) => {
  const [apiKeySource, setApiKeySource] = useState<'session' | 'custom'>(
    disabledPersonalKeyCreation ? 'custom' : 'session'
  );
  const [apiKey, setApiKey] = useState("");
  const [inputMessage, setInputMessage] = useState("");
  const [chatHistory, setChatHistory] = useState<
    { role: string; content: string; model?: string }[]
  >([]);
  const [selectedModel, setSelectedModel] = useState<string | undefined>(
    undefined
  );
  const [modelInfo, setModelInfo] = useState<
    { value: string; label: string }[]
  >([]);
  const chatEndRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    if (!accessToken || !token || !userRole || !userID) {
      return;
    }

    // Fetch model info and set the default selected model
    const fetchModelInfo = async () => {
      try {
        const fetchedAvailableModels = await modelAvailableCall(
          accessToken,
          userID,
          userRole
        );

        console.log("model_info:", fetchedAvailableModels);

        if (fetchedAvailableModels?.data.length > 0) {
          // Create a Map to store unique models using the model ID as key
          const uniqueModelsMap = new Map();

          fetchedAvailableModels["data"].forEach((item: { id: string }) => {
            uniqueModelsMap.set(item.id, {
              value: item.id,
              label: item.id,
            });
          });

          // Convert Map values back to array
          const uniqueModels = Array.from(uniqueModelsMap.values());

          // Sort models alphabetically
          uniqueModels.sort((a, b) => a.label.localeCompare(b.label));

          setModelInfo(uniqueModels);
          setSelectedModel(uniqueModels[0].value);
        }
      } catch (error) {
        console.error("Error fetching model info:", error);
      }
    };

    fetchModelInfo();
  }, [accessToken, userID, userRole]);

  useEffect(() => {
    // Scroll to the bottom of the chat whenever chatHistory updates
    if (chatEndRef.current) {
      // Add a small delay to ensure content is rendered
      setTimeout(() => {
        chatEndRef.current?.scrollIntoView({
          behavior: "smooth",
          block: "end", // Keep the scroll position at the end
        });
      }, 100);
    }
  }, [chatHistory]);

  // Append a streamed chunk to the last message from the same role,
  // or start a new message for that role
  const updateUI = (role: string, chunk: string, model?: string) => {
    setChatHistory((prevHistory) => {
      const lastMessage = prevHistory[prevHistory.length - 1];

      if (lastMessage && lastMessage.role === role) {
        return [
          ...prevHistory.slice(0, prevHistory.length - 1),
          { role, content: lastMessage.content + chunk, model },
        ];
      } else {
        return [...prevHistory, { role, content: chunk, model }];
      }
    });
  };

  const handleKeyDown = (event: React.KeyboardEvent) => {
    if (event.key === 'Enter') {
      handleSendMessage();
    }
  };

  const handleSendMessage = async () => {
    if (inputMessage.trim() === "") return;

    if (!token || !userRole || !userID) {
      return;
    }

    const effectiveApiKey = apiKeySource === 'session' ? accessToken : apiKey;

    if (!effectiveApiKey) {
      message.error("Please provide an API key or select Current UI Session");
      return;
    }

    // Create message object without model field for API call
    const newUserMessage = { role: "user", content: inputMessage };

    // Create chat history for API call - strip out model field
    const apiChatHistory = [
      ...chatHistory.map(({ role, content }) => ({ role, content })),
      newUserMessage,
    ];

    // Update UI with full message object (including model field for display)
    setChatHistory([...chatHistory, newUserMessage]);

    try {
      if (selectedModel) {
        await generateModelResponse(
          apiChatHistory,
          (chunk, model) => updateUI("assistant", chunk, model),
          selectedModel,
          effectiveApiKey
        );
      }
    } catch (error) {
      console.error("Error fetching model response", error);
      updateUI("assistant", "Error fetching model response");
    }

    setInputMessage("");
  };

  const clearChatHistory = () => {
    setChatHistory([]);
    message.success("Chat history cleared.");
  };

  if (userRole && userRole === "Admin Viewer") {
    const { Title, Paragraph } = Typography;
    return (
      <div>
        <Title level={1}>Access Denied</Title>
        <Paragraph>Ask your proxy admin for access to test models</Paragraph>
      </div>
    );
  }

  const onChange = (value: string) => {
    console.log(`selected ${value}`);
    setSelectedModel(value);
  };

  return (
    <div style={{ width: "100%", position: "relative" }}>
      <Grid className="gap-2 p-8 h-[80vh] w-full mt-2">
        <Card>
          <TabGroup>
            <TabList>
              <Tab>Chat</Tab>
            </TabList>
            <TabPanels>
              <TabPanel>
                {/* API key source, model picker, and chat controls */}
                <div className="sm:max-w-2xl">
                  <Grid numItems={2}>
                    <Col>
                      <Text>API Key Source</Text>
                      <Select
                        disabled={disabledPersonalKeyCreation}
                        defaultValue={apiKeySource}
                        style={{ width: "100%" }}
                        onChange={(value) =>
                          setApiKeySource(value as 'session' | 'custom')
                        }
                        options={[
                          { value: 'session', label: 'Current UI Session' },
                          { value: 'custom', label: 'Virtual Key' },
                        ]}
                      />
                      {apiKeySource === 'custom' && (
                        <TextInput
                          className="mt-2"
                          type="password"
                          placeholder="Enter custom API key"
                          value={apiKey}
                          onChange={(e) => setApiKey(e.target.value)}
                        />
                      )}
                    </Col>
                    <Col className="mx-2">
                      <Select
                        placeholder="Select a Model"
                        value={selectedModel}
                        onChange={onChange}
                        options={modelInfo}
                        style={{ width: "100%" }}
                        showSearch={true}
                      />
                    </Col>
                  </Grid>

                  {/* Clear Chat Button */}
                  <Button onClick={clearChatHistory} className="mt-4">
                    Clear Chat
                  </Button>
                </div>
                {/* Chat */}
                <Table
                  className="mt-5"
                  style={{
                    display: "block",
                    maxHeight: "60vh",
                    overflowY: "auto",
                  }}
                >
                  <TableBody>
                    {chatHistory.map((message, index) => (
                      <TableRow key={index}>
                        <TableCell>
                          {/* Role label, plus the serving model for assistant messages */}
                          <div
                            style={{
                              display: "flex",
                              alignItems: "center",
                              gap: "8px",
                            }}
                          >
                            <strong>{message.role}</strong>
                            {message.role === "assistant" && message.model && (
                              <span style={{ fontSize: "12px", color: "#666" }}>
                                {message.model}
                              </span>
                            )}
                          </div>
                          {/* Render message content as markdown, with syntax highlighting for fenced code blocks */}
                          <div
                            style={{
                              whiteSpace: "pre-wrap",
                              wordBreak: "break-word",
                            }}
                          >
                            <ReactMarkdown
                              components={{
                                code({
                                  node,
                                  inline,
                                  className,
                                  children,
                                  ...props
                                }: React.ComponentPropsWithoutRef<'code'> & {
                                  inline?: boolean;
                                  node?: any;
                                }) {
                                  const match = /language-(\w+)/.exec(
                                    className || ''
                                  );
                                  return !inline && match ? (
                                    <SyntaxHighlighter
                                      style={coy as any}
                                      language={match[1]}
                                      PreTag="div"
                                      {...props}
                                    >
                                      {String(children).replace(/\n$/, '')}
                                    </SyntaxHighlighter>
                                  ) : (
                                    <code className={className} {...props}>
                                      {children}
                                    </code>
                                  );
                                },
                              }}
                            >
                              {message.content}
                            </ReactMarkdown>
                          </div>
                        </TableCell>
                      </TableRow>
                    ))}
                    {/* Invisible anchor targeted by the auto-scroll effect */}
                    <TableRow>
                      <TableCell>
                        <div ref={chatEndRef} style={{ height: "1px" }} />
                      </TableCell>
                    </TableRow>
                  </TableBody>
                </Table>
                {/* Message input pinned to the bottom of the card */}
                <div
                  className="mt-3"
                  style={{ position: "absolute", bottom: 5, width: "95%" }}
                >
                  <div className="flex">
                    <TextInput
                      type="text"
                      value={inputMessage}
                      onChange={(e) => setInputMessage(e.target.value)}
                      onKeyDown={handleKeyDown}
                      placeholder="Type your message..."
                    />
                    <Button onClick={handleSendMessage} className="ml-2">
                      Send
                    </Button>
                  </div>
                </div>
              </TabPanel>
            </TabPanels>
          </TabGroup>
        </Card>
      </Grid>
    </div>
  );
};

export default ChatUI;