Upload setup.py
setup.py
ADDED
@@ -0,0 +1,34 @@
+import os
+import getpass
+from dotenv import load_dotenv
+from langchain_groq import ChatGroq
+from langchain_community.tools import TavilySearchResults
+
+load_dotenv()
+
+def _set_env(var: str):
+    if not os.environ.get(var):
+        os.environ[var] = getpass.getpass(f"Enter {var}: ")
+
+
+# Set up environment variables, prompting only for keys that are not already set
+_set_env("GROQ_API_KEY")
+_set_env("TAVILY_API_KEY")
+_set_env("LANGCHAIN_API_KEY")
+_set_env("JINA_API_KEY")
+os.environ["LANGCHAIN_TRACING_V2"] = "true"
+os.environ["LANGCHAIN_PROJECT"] = "langchain-academy"
+
+
+
+# LLM: Groq-hosted Llama 3.1 8B Instant, deterministic output
+llm = ChatGroq(temperature=0, model_name="llama-3.1-8b-instant")
+
+# Tavily web search tool
+tavily_search = TavilySearchResults(
+    max_results=2,
+    search_depth="advanced",
+    include_answer=True,
+    include_raw_content=True,
+    include_images=True,
+)
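
For context, a minimal sketch of how the two objects defined in this file could be exercised after the commit. The `setup` import name, the example prompts, and the printing are illustrative assumptions, not part of the uploaded file.

# Hypothetical usage sketch: assumes setup.py is importable as `setup`
# and that valid API keys are supplied at the getpass prompts.
from setup import llm, tavily_search

# Chat completion against the Groq-hosted model.
reply = llm.invoke("In one sentence, what does Tavily provide?")
print(reply.content)

# Web search via Tavily; returns a list of result dictionaries.
results = tavily_search.invoke({"query": "LangChain Academy"})
print(results)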