Update app.py
Browse files
app.py
CHANGED
@@ -1,19 +1,12 @@
|
|
1 |
import streamlit as st
|
2 |
from langchain_community.llms import HuggingFaceHub
|
3 |
from langchain_community.embeddings import SentenceTransformerEmbeddings
|
4 |
-
from
|
5 |
import numpy as np
|
6 |
|
7 |
# gemma = 'google/gemma-7b-it';
|
8 |
gemma = 'google/recurrentgemma-2b-it';
|
9 |
|
10 |
-
# 1. 初始化 Gemma 模型
|
11 |
-
try:
|
12 |
-
llm = HuggingFaceHub(repo_id=gemma, model_kwargs={"temperature": 0.5, "max_length": 512})
|
13 |
-
except Exception as e:
|
14 |
-
st.error(f"Gemma 模型加载失败:{e}")
|
15 |
-
st.stop()
|
16 |
-
|
17 |
# 2. 准备知识库数据 (示例)
|
18 |
knowledge_base = [
|
19 |
"Gemma 是 Google 开发的大型语言模型。",
|
@@ -32,7 +25,13 @@ except Exception as e:
|
|
32 |
st.stop()
|
33 |
|
34 |
# 4. 问答函数
|
35 |
-
def answer_question(question):
|
|
|
|
|
|
|
|
|
|
|
|
|
36 |
try:
|
37 |
question_embedding = embeddings.embed_query(question)
|
38 |
question_embedding_np = " ".join(map(str, question_embedding))
|
@@ -53,13 +52,16 @@ def answer_question(question):
|
|
53 |
# 5. Streamlit 界面
|
54 |
st.title("Gemma 知识库问答系统")
|
55 |
|
56 |
-
|
|
|
|
|
|
|
57 |
|
58 |
if st.button("提交"):
|
59 |
if not question:
|
60 |
st.warning("请输入问题!")
|
61 |
else:
|
62 |
with st.spinner("正在查询..."):
|
63 |
-
answer = answer_question(question)
|
64 |
st.write("答案:")
|
65 |
st.write(answer)
|
|
|
import streamlit as st
from langchain_community.llms import HuggingFaceHub
from langchain_community.embeddings import SentenceTransformerEmbeddings
from langchain_community.vectorstores import FAISS
import numpy as np

# Default Hugging Face repo id for the model.
# 'google/gemma-7b-it' is a larger alternative to the 2B recurrent variant.
# (Trailing semicolons removed — they are a no-op carried over from another language.)
gemma = 'google/recurrentgemma-2b-it'
9 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10 |
# 2. 准备知识库数据 (示例)
|
11 |
knowledge_base = [
|
12 |
"Gemma 是 Google 开发的大型语言模型。",
|
|
|
25 |
st.stop()
|
26 |
|
27 |
# 4. 问答函数
|
28 |
+
def answer_question(gemma, temperature, max_length, question):
|
29 |
+
# 1. 初始化 Gemma 模型
|
30 |
+
try:
|
31 |
+
llm = HuggingFaceHub(repo_id=gemma, model_kwargs={"temperature": temperature, "max_length": max_length})
|
32 |
+
except Exception as e:
|
33 |
+
st.error(f"Gemma 模型加载失败:{e}")
|
34 |
+
st.stop()
|
35 |
try:
|
36 |
question_embedding = embeddings.embed_query(question)
|
37 |
question_embedding_np = " ".join(map(str, question_embedding))
|
|
|
# 5. Streamlit UI ("Streamlit 界面"): collect model/config/question from the
# user and display the answer produced by answer_question().
st.title("Gemma 知识库问答系统")

# NOTE: st.text_area always returns str — numeric fields are converted below
# before being handed to the model, which expects float/int model_kwargs.
gemma = st.text_area("模型", "google/gemma-7b-it", height=50)
temperature = st.text_area("temperature", "1.0", height=50)
max_length = st.text_area("max_length", "1024", height=50)
question = st.text_area("请输入问题", "Gemma 有哪些特点?", height=100)

if st.button("提交"):
    if not question:
        st.warning("请输入问题!")
    else:
        # Bug fix: the raw strings "1.0"/"1024" were previously passed
        # straight into HuggingFaceHub model_kwargs; convert to numbers
        # and fail with a readable message instead of a backend error.
        try:
            temperature_value = float(temperature)
            max_length_value = int(max_length)
        except ValueError:
            st.error("temperature 必须是数字,max_length 必须是整数!")
            st.stop()
        with st.spinner("正在查询..."):
            answer = answer_question(gemma, temperature_value, max_length_value, question)
            st.write("答案:")
            st.write(answer)