Update app.py
app.py CHANGED
@@ -4,7 +4,6 @@ import os
 import torch
 from dotenv import load_dotenv
 from langchain_community.vectorstores import Qdrant
-from langchain_huggingface import HuggingFaceEmbeddings
 from langchain.prompts import ChatPromptTemplate
 from langchain.schema.runnable import RunnablePassthrough
 from langchain.schema.output_parser import StrOutputParser
@@ -16,15 +15,11 @@ from typing import List, Tuple, Generator
 from dataclasses import dataclass
 from datetime import datetime
 from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
-
-from langchain_cerebras import ChatCerebras
+
 from queue import Queue
 from threading import Thread
 from langchain.chains import LLMChain
 from langchain_core.prompts import PromptTemplate
-from langchain_huggingface import HuggingFaceEndpoint
-from langchain_google_genai import ChatGoogleGenerativeAI
-
 # Configure logging
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
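Net effect of the commit: the unused provider imports (HuggingFaceEmbeddings, ChatCerebras, HuggingFaceEndpoint, ChatGoogleGenerativeAI) are dropped while the Qdrant vector store, prompt, runnable, and output-parser imports stay. The body of app.py is not shown in these hunks, so the snippet below is only a minimal sketch of how the retained imports (ChatPromptTemplate, RunnablePassthrough, StrOutputParser) are commonly composed into a retrieval-style chain; fake_retrieve and FakeListLLM are hypothetical stand-ins for the app's real Qdrant retriever and model, not code from this repository.

# Illustrative sketch only -- not the contents of app.py.
from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.output_parser import StrOutputParser
from langchain_core.runnables import RunnableLambda
from langchain_community.llms import FakeListLLM  # offline stand-in for the app's LLM

def fake_retrieve(question: str) -> str:
    # Placeholder for a real retriever built from langchain_community.vectorstores.Qdrant.
    return "Qdrant stores dense vectors and supports similarity search."

prompt = ChatPromptTemplate.from_template(
    "Answer using only this context:\n{context}\n\nQuestion: {question}"
)
llm = FakeListLLM(responses=["Qdrant is a vector database."])

# LCEL composition: feed the question to both the retriever and the prompt,
# then parse the model output to a plain string.
chain = (
    {"context": RunnableLambda(fake_retrieve), "question": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)

print(chain.invoke("What does Qdrant do?"))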