llama3.2
rag.py CHANGED
@@ -3,7 +3,8 @@ import dspy
 import pandas as pd
 
 #lm = dspy.LM('ollama_chat/deepseek-r1', api_base='http://localhost:11434', api_key='')
-lm = dspy.LM('huggingface/Qwen/Qwen2.5-Coder-32B-Instruct')
+#lm = dspy.LM('huggingface/Qwen/Qwen2.5-Coder-32B-Instruct')
+lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
 dspy.configure(lm=lm)
 
 df = pd.read_csv("product2.csv")
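The change in rag.py swaps the configured LM from the hosted Qwen2.5-Coder-32B-Instruct to meta-llama/Llama-3.2-1B via the Hugging Face provider. A minimal sketch of how that configuration would be exercised is below; it is not part of the commit. The HF_TOKEN requirement of the Hugging Face provider and the ChainOfThought signature are assumptions, not something shown in the diff.

# Minimal sketch (not part of the commit): exercising the new LM config from rag.py.
# Assumes HF_TOKEN is set for the Hugging Face provider; the signature is illustrative.
import dspy

lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
dspy.configure(lm=lm)

qa = dspy.ChainOfThought('question -> answer')  # hypothetical signature, not in rag.py
print(qa(question="Which product has the highest rating?").answer)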
tool.py CHANGED
@@ -5,7 +5,8 @@ from dspy.predict.react import Tool
 from tavily import TavilyClient
 
 #lm = dspy.LM('ollama_chat/deepseek-r1', api_base='http://localhost:11434', api_key='')
-lm = dspy.LM('huggingface/Qwen/Qwen2.5-Coder-32B-Instruct')
+#lm = dspy.LM('huggingface/Qwen/Qwen2.5-Coder-32B-Instruct')
+lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
 dspy.configure(lm=lm)
 
 search_client = TavilyClient(api_key=os.environ["T_TOKEN"])
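tool.py receives the same LM swap; its visible imports (Tool from dspy.predict.react, TavilyClient) suggest it wires Tavily web search into a DSPy agent. A minimal sketch of how those pieces could fit together follows; only the imports and the LM/TavilyClient setup appear in the diff, so the search() wrapper and the ReAct signature are assumptions.

# Minimal sketch (not part of the commit): how the visible pieces of tool.py might fit together.
# The search() wrapper and the ReAct signature are assumptions; only the imports and the
# TavilyClient/LM setup are shown in the diff.
import os
import dspy
from dspy.predict.react import Tool
from tavily import TavilyClient

lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
dspy.configure(lm=lm)

search_client = TavilyClient(api_key=os.environ["T_TOKEN"])

def search(query: str) -> str:
    """Return the top Tavily results as plain text (illustrative wrapper)."""
    results = search_client.search(query)
    return "\n".join(r["content"] for r in results["results"])

agent = dspy.ReAct('question -> answer', tools=[Tool(search)])  # hypothetical signature
print(agent(question="What is DSPy?").answer)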