File size: 3,116 Bytes
50be1ff
9162cf1
 
 
 
 
 
 
 
 
 
515f567
 
 
9162cf1
515f567
 
61a50a8
9162cf1
b7f6cd9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
61a50a8
 
 
 
515f567
 
61a50a8
 
 
 
 
515f567
61a50a8
515f567
 
61a50a8
 
 
 
 
 
 
 
 
 
 
 
 
 
515f567
 
 
 
 
 
 
 
 
 
 
 
 
50be1ff
515f567
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import streamlit as st
import os
import getpass
from langchain import PromptTemplate
from langchain import hub
from langchain.docstore.document import Document
from langchain.document_loaders import WebBaseLoader
from langchain.schema import StrOutputParser
from langchain.schema.prompt_template import format_document
from langchain.schema.runnable import RunnablePassthrough
from langchain.vectorstores import Chroma
import google.generativeai as genai
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain_google_genai import ChatGoogleGenerativeAI

# Fails fast with a KeyError if the variable is unset — set GOOGLE_API_KEY
# in the environment before launching the app.
GOOGLE_API_KEY = os.environ['GOOGLE_API_KEY']


@st.cache_resource
def _build_rag_components():
    """Fetch the source page, build the Chroma index, and create the LLM.

    Streamlit re-executes the whole script on every widget interaction;
    ``st.cache_resource`` makes these expensive steps (web download,
    embedding, vector-store build, model handle) run once per process
    instead of on every rerun.

    Returns:
        Tuple ``(docs, gemini_embeddings, vectorstore, vectorstore_disk,
        retriever, llm)`` — the same objects the original module-level
        statements produced.
    """
    # Page whose contents are indexed and used as RAG context.
    loader = WebBaseLoader("https://www.google.com/finance/?hl=en")
    docs = loader.load()

    # Embedding model used both to build and to query the index.
    gemini_embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

    # Build the index and persist it to disk.
    vectorstore = Chroma.from_documents(
        documents=docs,                  # Data
        embedding=gemini_embeddings,     # Embedding model
        persist_directory="./chroma_db"  # Directory to save data
    )

    # Re-open the persisted index from disk (mirrors the original flow).
    vectorstore_disk = Chroma(
        persist_directory="./chroma_db",       # Directory of db
        embedding_function=gemini_embeddings   # Embedding model
    )
    # k=1: only the single closest chunk is used as context.
    retriever = vectorstore_disk.as_retriever(search_kwargs={"k": 1})

    # If there is no environment variable set for the API key, you can pass the
    # API key to the parameter `google_api_key` of the
    # `ChatGoogleGenerativeAI` function: `google_api_key="key"`.
    llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=GOOGLE_API_KEY)

    return docs, gemini_embeddings, vectorstore, vectorstore_disk, retriever, llm


# Unpack under the original module-level names so downstream code is unaffected.
docs, gemini_embeddings, vectorstore, vectorstore_disk, retriever, llm = _build_rag_components()


# QA prompt: {context} is filled with the retrieved document text,
# {question} with the user's query.
llm_prompt_template = """You are an assistant for question-answering tasks.
Use the following context to answer the question.
If you don't know the answer, just say that you don't know.
Use five sentences maximum and keep the answer concise.\n
Question: {question} \nContext: {context} \nAnswer:"""

# Equivalent to PromptTemplate.from_template(llm_prompt_template), with the
# input variables declared explicitly instead of inferred from the template.
llm_prompt = PromptTemplate(
    template=llm_prompt_template,
    input_variables=["context", "question"],
)


def format_docs(docs):
    """Concatenate the retrieved documents' text, separated by blank lines."""
    chunks = []
    for doc in docs:
        chunks.append(doc.page_content)
    return "\n\n".join(chunks)

# RAG pipeline, assembled stepwise:
#  1. fan out the input — retrieved docs (formatted) -> "context",
#     the raw user question -> "question";
#  2. render the prompt template;
#  3. run the chat model;
#  4. extract the reply as a plain string.
_chain_inputs = {"context": retriever | format_docs, "question": RunnablePassthrough()}
rag_chain = _chain_inputs | llm_prompt | llm | StrOutputParser()

# Streamlit UI: free-text question input with a stock-related default query.
prompt = st.text_input("Enter Prompt","What is the best stocks for the next few weeks")

# Run the RAG chain on the question and render the model's answer on the page.
res = rag_chain.invoke(prompt)
st.write(res)

# If there is no environment variable set for the API key, you can pass the API
# key to the parameter `google_api_key` of the `GoogleGenerativeAIEmbeddings`
# function: `google_api_key = "key"`.

# gemini_embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

# # Save to disk
# vectorstore = Chroma.from_documents(
#                      documents=docs,                 # Data
#                      embedding=gemini_embeddings,    # Embedding model
#                      persist_directory="./chroma_db" # Directory to save data
#                      )

# vectorstore_disk = Chroma(
#                         persist_directory="./chroma_db",       # Directory of db
#                         embedding_function=gemini_embeddings   # Embedding model