Joshua Sundance Bailey
committed on
Commit · a9599af
1 Parent(s): 86262ce
typing
langchain-streamlit-demo/llm_stuff.py
CHANGED
@@ -4,14 +4,16 @@ import streamlit as st
 from langchain import LLMChain
 from langchain.callbacks.base import BaseCallbackHandler
 from langchain.chat_models import ChatOpenAI, ChatAnyscale, ChatAnthropic
+from langchain.chat_models.base import BaseChatModel
 from langchain.memory import ConversationBufferMemory, StreamlitChatMessageHistory
+from langchain.memory.chat_memory import BaseChatMemory
 from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 from streamlit_feedback import streamlit_feedback

 _DEFAULT_SYSTEM_PROMPT = "You are a helpful chatbot."


-def get_memory() ->
+def get_memory() -> BaseChatMemory:
     return ConversationBufferMemory(
         chat_memory=StreamlitChatMessageHistory(key="langchain_messages"),
         return_messages=True,
@@ -24,7 +26,7 @@ def get_llm(
     provider_api_key: str,
     temperature: float,
     max_tokens: int = 1000,
-):
+) -> BaseChatModel:
     if model.startswith("gpt"):
         return ChatOpenAI(
             model=model,
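
The new return annotations make the helpers easier to compose downstream. As a minimal sketch (not code from this commit), the typed values could be wired into the LLMChain that llm_stuff.py already imports; the get_chain() helper, the prompt layout, and the "history"/"input" variable names below are assumptions for illustration.

# Illustrative sketch, not part of the commit: wiring the typed helpers into an LLMChain.
from langchain import LLMChain
from langchain.chat_models.base import BaseChatModel
from langchain.memory.chat_memory import BaseChatMemory
from langchain.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
)

_DEFAULT_SYSTEM_PROMPT = "You are a helpful chatbot."


def get_chain(llm: BaseChatModel, memory: BaseChatMemory) -> LLMChain:
    # Accepts any chat model / memory implementation, which is exactly what the
    # new return annotations on get_llm() and get_memory() advertise.
    prompt = ChatPromptTemplate.from_messages(
        [
            SystemMessagePromptTemplate.from_template(_DEFAULT_SYSTEM_PROMPT),
            # "history" is ConversationBufferMemory's default memory_key; with
            # return_messages=True the placeholder receives a list of messages.
            MessagesPlaceholder(variable_name="history"),
            HumanMessagePromptTemplate.from_template("{input}"),
        ],
    )
    return LLMChain(llm=llm, prompt=prompt, memory=memory)


# Hypothetical usage with the helpers defined in llm_stuff.py:
# chain = get_chain(get_llm("gpt-3.5-turbo", provider_api_key, temperature=0.7), get_memory())
# chain.run(input="Hello!")

Because the annotations are the base classes rather than a concrete provider class, a helper like this accepts whichever model get_llm() returns (ChatOpenAI, ChatAnyscale, or ChatAnthropic) without changing its signature.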