Update chain.py
Browse files
chain.py
CHANGED
@@ -50,7 +50,8 @@ def create_question_answering_chain(retriever):
|
|
50 |
# Initialize the OpenAI language model with specified temperature, model name, and API key.
|
51 |
model = "meta-llama/Llama-2-7b-chat-hf"
|
52 |
|
53 |
-
|
|
|
54 |
|
55 |
pipeline = transformers.pipeline(
|
56 |
"text-generation", #task
|
@@ -63,7 +64,7 @@ def create_question_answering_chain(retriever):
|
|
63 |
do_sample=True,
|
64 |
top_k=10,
|
65 |
num_return_sequences=1,
|
66 |
-
eos_token_id=tokenizer.eos_token_id
|
67 |
)
|
68 |
|
69 |
llm = HuggingFacePipeline(pipeline = pipeline, model_kwargs = {'temperature':0})
|
|
|
50 |
# Initialize the OpenAI language model with specified temperature, model name, and API key.
|
51 |
model = "meta-llama/Llama-2-7b-chat-hf"
|
52 |
|
53 |
+
access_token = os.environ["HF_TOKEN"]  # SECURITY: a hardcoded Hugging Face token (hf_HDH…) was committed here and has been redacted — the exposed token must be revoked on huggingface.co, and the value must be supplied via an environment variable, never committed to source
|
54 |
+
tokenizer = AutoTokenizer.from_pretrained(model, token=access_token)
|
55 |
|
56 |
pipeline = transformers.pipeline(
|
57 |
"text-generation", #task
|
|
|
64 |
do_sample=True,
|
65 |
top_k=10,
|
66 |
num_return_sequences=1,
|
67 |
+
eos_token_id=tokenizer.eos_token_id,
|
68 |
)
|
69 |
|
70 |
llm = HuggingFacePipeline(pipeline = pipeline, model_kwargs = {'temperature':0})
|