Update app.py
app.py CHANGED
@@ -5,23 +5,14 @@ from typing import Dict, List, Tuple
 
 import gradio as gr
 import streamlit as st
+import streamlit_chat
 from huggingface_hub import InferenceClient, hf_hub_url, cached_download
-from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
-from rich import print as rprint
-from rich.panel import Panel
-from rich.progress import track
-from rich.table import Table
-import subprocess
-import threading
 import git
-from
-from
-from
-from
-from
-from langchain_community.document_loaders import TextLoader
-from streamlit_ace import st_ace
-from streamlit_chat import st_chat
+from langchain_community.llms import HuggingFaceHub
+from langchain_community.chains import ConversationChain
+from langchain_community.memory import ConversationBufferMemory
+from langchain_community.chains.question_answering import load_qa_chain
+from langchain_community.utils import CharacterTextSplitter
 
 # --- Constants ---
 MODEL_NAME = "google/flan-t5-xl" # Consider using a more powerful model like 'google/flan-t5-xl'
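
The commit drops the direct transformers/rich imports and pulls in LangChain pieces (HuggingFaceHub, ConversationChain, ConversationBufferMemory) plus streamlit_chat for the UI. Two caveats: in the langchain releases I'm aware of, the chain, memory, and text-splitter classes live under `langchain.*` rather than `langchain_community.*`, so several of the added paths may not resolve as written; and `cached_download` has been removed from recent `huggingface_hub` releases in favor of `hf_hub_download`. Below is a minimal sketch of how the newly imported components are conventionally wired together, assuming the `langchain.*` layout and reusing the file's MODEL_NAME as the Hub repo id; it is an illustration, not the Space's actual code.

```python
# Hedged sketch: wiring the imports added in this commit into a conversational chain.
# Assumes the conventional langchain module layout (ConversationChain and
# ConversationBufferMemory under langchain.*) and that HUGGINGFACEHUB_API_TOKEN
# is set in the environment.
from langchain_community.llms import HuggingFaceHub
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory

MODEL_NAME = "google/flan-t5-xl"  # same constant as in app.py

llm = HuggingFaceHub(
    repo_id=MODEL_NAME,
    model_kwargs={"temperature": 0.5, "max_length": 512},  # illustrative values
)

# ConversationBufferMemory keeps the running chat history and injects it into each prompt,
# which is what a streamlit_chat front end would drive turn by turn.
conversation = ConversationChain(llm=llm, memory=ConversationBufferMemory())

print(conversation.predict(input="Hello! What can you help me with?"))
```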