research14 committed on
Commit
488b0ed
·
1 Parent(s): e2550af
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -50,8 +50,8 @@ for i, j in zip(ents, ents_prompt):
50
  print(i, j)
51
 
52
  model_mapping = {
53
- #'gpt3.5': 'gpt2',
54
- 'vicuna-7b': 'lmsys/vicuna-7b-v1.3',
55
  #'vicuna-13b': 'lmsys/vicuna-13b-v1.3',
56
  #'vicuna-33b': 'lmsys/vicuna-33b-v1.3',
57
  #'fastchat-t5': 'lmsys/fastchat-t5-3b-v1.0',
@@ -116,7 +116,7 @@ def process_text(model_name, task, text):
116
  for gid in tqdm(gid_list, desc='Query'):
117
  text = ptb[gid]['text']
118
 
119
- if model_name == 'gpt2':
120
  if task == 'POS':
121
  strategy1_format = template_all.format(text)
122
  strategy2_format = prompt2_pos.format(text)
@@ -126,9 +126,9 @@ def process_text(model_name, task, text):
126
  result2 = gpt_pipeline(strategy2_format)
127
  result3 = gpt_pipeline(strategy3_format)
128
 
129
- generated_text1 = result1[0]['generated_text']
130
- generated_text2 = result2[0]['generated_text']
131
- generated_text3 = result3[0]['generated_text']
132
 
133
  return (generated_text1, generated_text2, generated_text3)
134
  # elif task == 'Chunking':
 
50
  print(i, j)
51
 
52
  model_mapping = {
53
+ 'gpt3.5': 'gpt2',
54
+ #'vicuna-7b': 'lmsys/vicuna-7b-v1.3',
55
  #'vicuna-13b': 'lmsys/vicuna-13b-v1.3',
56
  #'vicuna-33b': 'lmsys/vicuna-33b-v1.3',
57
  #'fastchat-t5': 'lmsys/fastchat-t5-3b-v1.0',
 
116
  for gid in tqdm(gid_list, desc='Query'):
117
  text = ptb[gid]['text']
118
 
119
+ if model_name == 'gpt3.5':
120
  if task == 'POS':
121
  strategy1_format = template_all.format(text)
122
  strategy2_format = prompt2_pos.format(text)
 
126
  result2 = gpt_pipeline(strategy2_format)
127
  result3 = gpt_pipeline(strategy3_format)
128
 
129
+ generated_text1 = result1[0]['sequence']
130
+ generated_text2 = result2[0]['sequence']
131
+ generated_text3 = result3[0]['sequence']
132
 
133
  return (generated_text1, generated_text2, generated_text3)
134
  # elif task == 'Chunking':