Tonic committed on
Commit
0351fe3
·
verified ·
1 Parent(s): cbe2df8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -6
app.py CHANGED
@@ -50,13 +50,15 @@ def last_token_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tenso
50
 
51
  @spaces.GPU
52
  def compute_embeddings(selected_task, *input_texts):
53
- max_length = 2042
54
- if selected_task:
55
- task = tasks[selected_task]
56
- processed_texts = [f'Instruct: {task}\nQuery: {text}' for text in input_texts]
57
  else:
58
- processed_texts = [f'Instruct: {system_prompt}\nQuerry: {text}' for text in input_texts]
59
- task = tasks[selected_task]
 
 
60
  batch_dict = tokenizer(processed_texts, max_length=max_length - 1, return_attention_mask=False, padding=False, truncation=True)
61
  batch_dict['input_ids'] = [input_ids + [tokenizer.eos_token_id] for input_ids in batch_dict['input_ids']]
62
  batch_dict = tokenizer.pad(batch_dict, padding=True, return_attention_mask=True, return_tensors='pt')
 
50
 
51
  @spaces.GPU
52
  def compute_embeddings(selected_task, *input_texts):
53
+
54
+ if selected_task == "None":
55
+ # Use the system prompt if 'None' is selected
56
+ processed_texts = [f'Instruct: {system_prompt}\nQuery: {input_text}']
57
  else:
58
+ # Use the task description from the tasks dictionary
59
+ task_description = tasks[selected_task]
60
+ processed_texts = [f'Instruct: {task_description}\nQuery: {input_text}']
61
+
62
  batch_dict = tokenizer(processed_texts, max_length=max_length - 1, return_attention_mask=False, padding=False, truncation=True)
63
  batch_dict['input_ids'] = [input_ids + [tokenizer.eos_token_id] for input_ids in batch_dict['input_ids']]
64
  batch_dict = tokenizer.pad(batch_dict, padding=True, return_attention_mask=True, return_tensors='pt')