sino committed on
Commit aac783a · 1 Parent(s): 7ffc8bc

Update src/LMdecoder.py

Files changed (1)
  1. src/LMdecoder.py +3 -3
src/LMdecoder.py CHANGED
@@ -81,11 +81,11 @@ class LMDecoder(nn.Module):
 
     def load_lm(self):
         ## ---------------------LM setting----------------------
-        self.tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b-hf')
+        self.tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b-hf', token='hf_********')
         if self.tokenizer.pad_token is None:
             self.tokenizer.pad_token = self.tokenizer.eos_token
-        self.LMconfig = AutoConfig.from_pretrained('meta-llama/Llama-2-7b-hf')
-        self.lm = AutoModelForCausalLM.from_pretrained('meta-llama/Llama-2-7b-hf')
+        self.LMconfig = AutoConfig.from_pretrained('meta-llama/Llama-2-7b-hf', token='hf_********')
+        self.lm = AutoModelForCausalLM.from_pretrained('meta-llama/Llama-2-7b-hf', token='hf_********')
 
 
     def forward(self, input_ids, flatten_embs, attention_mask, labels, **kwargs):
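
The net effect of the change is that every from_pretrained call in load_lm() now passes a token= argument so the gated meta-llama/Llama-2-7b-hf checkpoint can be downloaded; the commit embeds a personal access token directly in the source (redacted above). For comparison, here is a minimal standalone sketch of the same loading logic with the token read from an environment variable instead; the HF_TOKEN variable name and the module-level form are assumptions, not part of this repository:

import os

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Assumption: the access token is exported as HF_TOKEN rather than hardcoded.
hf_token = os.environ.get('HF_TOKEN')

tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b-hf', token=hf_token)
if tokenizer.pad_token is None:
    # Llama-2 ships without a pad token; reuse EOS, mirroring load_lm().
    tokenizer.pad_token = tokenizer.eos_token
lm_config = AutoConfig.from_pretrained('meta-llama/Llama-2-7b-hf', token=hf_token)
lm = AutoModelForCausalLM.from_pretrained('meta-llama/Llama-2-7b-hf', token=hf_token)

Running huggingface-cli login (or huggingface_hub.login()) once on the machine achieves the same authentication without passing token= at any call site.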