Update app.py
app.py CHANGED
@@ -45,6 +45,7 @@ def compute_embeddings(*input_texts):
     batch_dict = tokenizer(processed_texts, max_length=max_length - 1, return_attention_mask=False, padding=False, truncation=True)
     batch_dict['input_ids'] = [input_ids + [tokenizer.eos_token_id] for input_ids in batch_dict['input_ids']]
     batch_dict = tokenizer.pad(batch_dict, padding=True, return_attention_mask=True, return_tensors='pt')
+    batch_dict = {k: v.to(device) for k, v in batch_dict.items()}
     outputs = model(**batch_dict)
     embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])
     embeddings = F.normalize(embeddings, p=2, dim=1)
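The added line moves every tensor returned by tokenizer.pad onto the model's device before the forward pass, which is the usual fix when the model lives on the GPU but the tokenizer produces CPU tensors. A minimal, self-contained sketch of that flow, assuming a CUDA-aware device, a hypothetical checkpoint name, and illustrative input text (none of these are taken from the Space itself):

import torch
from transformers import AutoModel, AutoTokenizer

# Hypothetical checkpoint and input; the Space's actual model name is not shown in this diff.
model_name = 'intfloat/e5-mistral-7b-instruct'
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name).to(device)

max_length = 4096
processed_texts = ['query: example input text']

# Tokenize without padding, append the EOS token, then pad into PyTorch tensors (as in the diff).
batch_dict = tokenizer(processed_texts, max_length=max_length - 1,
                       return_attention_mask=False, padding=False, truncation=True)
batch_dict['input_ids'] = [ids + [tokenizer.eos_token_id] for ids in batch_dict['input_ids']]
batch_dict = tokenizer.pad(batch_dict, padding=True, return_attention_mask=True, return_tensors='pt')

# tokenizer.pad returns CPU tensors; move input_ids and attention_mask to the model's device,
# otherwise a GPU-hosted model raises a device-mismatch error at forward time (the error this commit fixes).
batch_dict = {k: v.to(device) for k, v in batch_dict.items()}

with torch.no_grad():
    outputs = model(**batch_dict)

Writing the move as a dict comprehension keeps input_ids and attention_mask on the same device without naming each key, so the later call model(**batch_dict) works unchanged.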