Rashik24 committed on
Commit
901e071
·
1 Parent(s): 0d7d920

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -8
README.md CHANGED
@@ -23,18 +23,14 @@ To start using the Rashik24/Mistral-Instruct-Bangla model, you can use the following
23
  ```Python
24
  from transformers import AutoModelForCausalLM, AutoTokenizer
25
 
26
- from peft import PeftModel, PeftConfig
27
-
28
-
29
-
30
  def load_model(model_name):
31
- config = PeftConfig.from_pretrained("Rashik24/Mistral-Instruct-Bangla")
32
- model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
33
- model = PeftModel.from_pretrained(model, "Rashik24/Mistral-Instruct-Bangla")
34
 
35
  def generate_text(prompt, model, tokenizer):
36
  inputs = tokenizer.encode(prompt, return_tensors='pt')
37
- outputs = model.generate(inputs, max_length=50, num_return_sequences=1)
38
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
39
 
40
  #Load the model
 
23
  ```Python
24
  from transformers import AutoModelForCausalLM, AutoTokenizer
25
 
 
 
 
 
26
  def load_model(model_name):
27
+ tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
28
+ model = AutoModelForCausalLM.from_pretrained(model_name)
29
+ return model, tokenizer
30
 
31
  def generate_text(prompt, model, tokenizer):
32
  inputs = tokenizer.encode(prompt, return_tensors='pt')
33
+ outputs = model.generate(inputs, max_length=256, num_return_sequences=1)
34
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
35
 
36
  #Load the model