Krzysztof Krystian Jankowski committed
Commit ca8913d · 1 Parent(s): 527e5bd
Files changed (1)
  1. app.py +3 -1
app.py CHANGED
@@ -1,11 +1,13 @@
 import streamlit as st
 from langchain.prompts import PromptTemplate
 from langchain_community.llms import CTransformers
+from transformers import AutoModel
 
 # load the model
 
+TinyLlamaModel = AutoModel.from_pretrained("NousResearch/Llama-2-7b-hf")
 def getLlamaResponse(input_text, no_words, blog_style):
-    llm=CTransformers(model='TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/blob/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf',
+    llm=CTransformers(model=TinyLlamaModel,
         model_type='llama',
         config={'max_new_tokens':256, 'temperature':0.2})
 
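For reference, a hedged sketch of how the langchain_community CTransformers wrapper is usually pointed at a GGUF checkpoint: its `model` argument takes a Hugging Face repo id or local path and `model_file` selects a specific quantized file, rather than an in-memory transformers AutoModel object. The repo and file names below are lifted from the removed line, and the PromptTemplate wording and `getLlamaResponse` body are assumptions about the intended blog-generation use case, not part of this commit.

# Sketch only: typical CTransformers usage with a GGUF checkpoint (assumed setup).
from langchain.prompts import PromptTemplate
from langchain_community.llms import CTransformers

def getLlamaResponse(input_text, no_words, blog_style):
    # `model` is a repo id (or local path); `model_file` picks the quantized GGUF
    # file inside it. Names are taken from the line removed in this commit.
    llm = CTransformers(
        model='TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF',
        model_file='tinyllama-1.1b-chat-v1.0.Q8_0.gguf',
        model_type='llama',
        config={'max_new_tokens': 256, 'temperature': 0.2},
    )
    # Hypothetical prompt template implied by the function signature.
    prompt = PromptTemplate(
        input_variables=['blog_style', 'input_text', 'no_words'],
        template="Write a blog for a {blog_style} audience on {input_text} in about {no_words} words.",
    )
    # Format the prompt and run the local GGUF model on it.
    return llm.invoke(
        prompt.format(blog_style=blog_style, input_text=input_text, no_words=no_words)
    )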