Commit acb52cd · Parent(s): 469b565
removed unnecessary files
Files changed:
- conll_1k_ling.csv +0 -0
- placeholder.py +0 -175
- run_llm2.py +0 -147
- sample_uniform_1k_2.txt +0 -1000
- test3.py +0 -30
conll_1k_ling.csv
DELETED
The diff for this file is too large to render. See raw diff.
placeholder.py
DELETED
@@ -1,175 +0,0 @@
import os
import sys
import json
import time
import openai
import pickle
import argparse
import requests
from tqdm import tqdm
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, LlamaForCausalLM, LlamaTokenizer

from fastchat.model import load_model, get_conversation_template, add_model_args

from nltk.tag.mapping import _UNIVERSAL_TAGS

import gradio as gr
from transformers import pipeline

demo = gr.Blocks()

uni_tags = list(_UNIVERSAL_TAGS)
uni_tags[-1] = 'PUNC'

bio_tags = ['B', 'I', 'O']
chunk_tags = ['ADJP', 'ADVP', 'CONJP', 'INTJ', 'LST', 'NP', 'O', 'PP', 'PRT', 'SBAR', 'UCP', 'VP']

syntags = ['NP', 'S', 'VP', 'ADJP', 'ADVP', 'SBAR', 'TOP', 'PP', 'POS', 'NAC', "''", 'SINV', 'PRN', 'QP', 'WHNP', 'RB', 'FRAG',
           'WHADVP', 'NX', 'PRT', 'VBZ', 'VBP', 'MD', 'NN', 'WHPP', 'SQ', 'SBARQ', 'LST', 'INTJ', 'X', 'UCP', 'CONJP', 'NNP', 'CD', 'JJ',
           'VBD', 'WHADJP', 'PRP', 'RRC', 'NNS', 'SYM', 'CC']

openai.api_key = " "

# determinant vs. determiner
# https://wikidiff.com/determiner/determinant
ents_prompt = ['Noun','Verb','Adjective','Adverb','Preposition/Subord','Coordinating Conjunction',# 'Cardinal Number',
               'Determiner',
               'Noun Phrase','Verb Phrase','Adjective Phrase','Adverb Phrase','Preposition Phrase','Conjunction Phrase','Coordinate Phrase','Quantitave Phrase','Complex Nominal',
               'Clause','Dependent Clause','Fragment Clause','T-unit','Complex T-unit',# 'Fragment T-unit',
               ][7:]
ents = ['NN', 'VB', 'JJ', 'RB', 'IN', 'CC', 'DT', 'NP', 'VP', 'ADJP', 'ADVP', 'PP', 'CONJP', 'CP', 'QP', 'CN', 'C', 'DC', 'FC', 'T', 'CT'][7:]


ents_prompt_uni_tags = ['Verb', 'Noun', 'Pronoun', 'Adjective', 'Adverb', 'Preposition and Postposition', 'Coordinating Conjunction',
                        'Determiner', 'Cardinal Number', 'Particles or other function words',
                        'Words that cannot be assigned a POS tag', 'Punctuation']

ents = uni_tags + ents
ents_prompt = ents_prompt_uni_tags + ents_prompt

for i, j in zip(ents, ents_prompt):
    print(i, j)

model_mapping = {
    'gpt3.5': 'gpt2',
    #'vicuna-7b': 'lmsys/vicuna-7b-v1.3',
    #'llama-7b': './llama/hf/7B',
}

with open('sample_uniform_1k_2.txt', 'r') as f:
    selected_idx = f.readlines()
selected_idx = [int(i.strip()) for i in selected_idx]#[s:e]

ptb = []
with open('ptb.jsonl', 'r') as f:
    for l in f:
        ptb.append(json.loads(l))


## Prompt 1
template_all = '''Please output the <Noun, Verb, Adjective, Adverb, Preposition/Subord, Coordinating Conjunction, Cardinal Number, Determiner, Noun Phrase, Verb Phrase, Adjective Phrase, Adverb Phrase, Preposition Phrase, Conjunction Phrase, Coordinate Phrase, Quantitave Phrase, Complex Nominal, Clause, Dependent Clause, Fragment Clause, T-unit, Complex T-unit, Fragment T-unit> in the following sentence without any additional text in json format: "{}"'''
template_single = '''Please output any <{}> in the following sentence one per line without any additional text: "{}"'''

## Prompt 2
prompt2_pos = '''Please pos tag the following sentence using Universal POS tag set without generating any additional text: {}'''
prompt2_chunk = '''Please do sentence chunking for the following sentence as in CoNLL 2000 shared task without generating any addtional text: {}'''
prompt2_parse = '''Generate textual representation of the constituency parse tree of the following sentence using Penn TreeBank tag set without outputing any additional text: {}'''

prompt2_chunk = '''Please chunk the following sentence in CoNLL 2000 format with BIO tags without outputing any additional text: {}'''

## Prompt 3
with open('demonstration_3_42_pos.txt', 'r') as f:
    demon_pos = f.read()
with open('demonstration_3_42_chunk.txt', 'r') as f:
    demon_chunk = f.read()
with open('demonstration_3_42_parse.txt', 'r') as f:
    demon_parse = f.read()

# Your existing code
theme = gr.themes.Soft()

# issue get request for gpt 3.5
gpt_pipeline = pipeline(task="text2text-generation", model="gpt2")
#vicuna7b_pipeline = pipeline(task="text2text-generation", model="lmsys/vicuna-7b-v1.3")
#llama7b_pipeline = pipeline(task="text2text-generation", model="./llama/hf/7B")

# Dropdown options for model and task
model_options = list(model_mapping.keys())
task_options = ['POS', 'Chunking'] # remove parsing


# Function to process text based on model and task
def process_text(tab, text):
    if tab == 'POS Tab':
        strategy1_format = template_all.format(text)
        strategy2_format = prompt2_pos.format(text)
        strategy3_format = demon_pos

        vicuna_result1 = gpt_pipeline(strategy1_format)[0]['generated_text']
        vicuna_result2 = gpt_pipeline(strategy2_format)[0]['generated_text']
        vicuna_result3 = gpt_pipeline(strategy3_format)[0]['generated_text']

        return (vicuna_result1, vicuna_result2, vicuna_result3)
    elif tab == 'Chunk Tab':
        strategy1_format = template_all.format(text)
        strategy2_format = prompt2_chunk.format(text)
        strategy3_format = demon_chunk

        result1 = gpt_pipeline(strategy1_format)[0]['generated_text']
        result2 = gpt_pipeline(strategy2_format)[0]['generated_text']
        result3 = gpt_pipeline(strategy3_format)[0]['generated_text']
        return (result1, result2, result3)

# Gradio interface
with demo:
    gr.Markdown("# LLM Evaluator With Linguistic Scrutiny")

    with gr.Tabs():
        with gr.TabItem("POS", id="POS Tab"):
            with gr.Row():
                gr.Markdown("<center>Vicuna 7b</center>")
                gr.Markdown("<center> LLaMA-7b </center>")
                gr.Markdown("<center> GPT 3.5 </center>")
            with gr.Row():
                model1_S1_output = gr.Textbox(label="Strategy 1 QA")
                model2_S1_output = gr.Textbox(label=".")
                model3_S1_output = gr.Textbox(label=".")
            with gr.Row():
                model1_S2_output = gr.Textbox(label="Strategy 2 Instruction")
                model2_S2_output = gr.Textbox(label=".")
                model3_S2_output = gr.Textbox(label=".")
            with gr.Row():
                model1_S3_output = gr.Textbox(label="Strategy 3 Structured Prompting")
                model2_S3_output = gr.Textbox(label=".")
                model3_S3_output = gr.Textbox(label=".")
            with gr.Row():
                prompt_POS = gr.Textbox(show_label=False, placeholder="Enter prompt")
                send_button_POS = gr.Button("Send", scale=0)

        with gr.TabItem("Chunking", id="Chunk Tab"):
            with gr.Row():
                gr.Markdown("<center>Vicuna 7b</center>")
                gr.Markdown("<center> LLaMA-7b </center>")
                gr.Markdown("<center> GPT 3.5 </center>")
            with gr.Row():
                model1_S1_output = gr.Textbox(label="Strategy 1 QA")
                model2_S1_output = gr.Textbox(label=".")
                model3_S1_output = gr.Textbox(label=".")
            with gr.Row():
                model1_S2_output = gr.Textbox(label="Strategy 2 Instruction")
                model2_S2_output = gr.Textbox(label=".")
                model3_S2_output = gr.Textbox(label=".")
            with gr.Row():
                model1_S3_output = gr.Textbox(label="Strategy 3 Structured Prompting")
                model2_S3_output = gr.Textbox(label=".")
                model3_S3_output = gr.Textbox(label=".")
            with gr.Row():
                prompt_Chunk = gr.Textbox(id="prompt_Chunk", show_label=False, placeholder="Enter prompt")
                send_button_Chunk = gr.Button("Send", scale=0)

    send_button_POS.click(process_text, inputs=["POS Tab", prompt_Chunk], outputs=[model1_S1_output, model1_S1_output, model1_S1_output])
    send_button_Chunk.click(process_text, inputs=["Chunk Tab", prompt_POS], outputs=[model1_S1_output, model1_S1_output, model1_S1_output])

demo.launch()
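In the deleted placeholder.py above, the click handlers pass the literal strings "POS Tab" / "Chunk Tab" inside inputs, where Blocks event listeners expect components, and each handler reads the other tab's prompt box while all three outputs point at the same textbox. A minimal sketch of how a fixed tab label can be passed to such a handler, assuming Gradio 3.x and using illustrative component names that are not from the deleted file, is:

import gradio as gr

def process_text(tab, text):
    # Stand-in for the deleted process_text: echo the tab label and prompt,
    # one value per output textbox.
    tagged = f"[{tab}] {text}"
    return tagged, tagged, tagged

with gr.Blocks() as demo:
    prompt_POS = gr.Textbox(show_label=False, placeholder="Enter prompt")
    out1 = gr.Textbox(label="Strategy 1 QA")
    out2 = gr.Textbox(label="Strategy 2 Instruction")
    out3 = gr.Textbox(label="Strategy 3 Structured Prompting")
    send_button_POS = gr.Button("Send")
    # gr.State carries the constant tab label; Gradio passes its value to the
    # function instead of treating the string as a component reference.
    send_button_POS.click(process_text,
                          inputs=[gr.State("POS Tab"), prompt_POS],
                          outputs=[out1, out2, out3])

demo.launch()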
run_llm2.py
DELETED
@@ -1,147 +0,0 @@
import os
import sys
import json
import time
import openai
import pickle
import argparse
import requests
from tqdm import tqdm
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, LlamaForCausalLM, LlamaTokenizer

from fastchat.model import load_model, get_conversation_template, add_model_args

from nltk.tag.mapping import _UNIVERSAL_TAGS

import gradio as gr
from transformers import pipeline

uni_tags = list(_UNIVERSAL_TAGS)
uni_tags[-1] = 'PUNC'

bio_tags = ['B', 'I', 'O']
chunk_tags = ['ADJP', 'ADVP', 'CONJP', 'INTJ', 'LST', 'NP', 'O', 'PP', 'PRT', 'SBAR', 'UCP', 'VP']

syntags = ['NP', 'S', 'VP', 'ADJP', 'ADVP', 'SBAR', 'TOP', 'PP', 'POS', 'NAC', "''", 'SINV', 'PRN', 'QP', 'WHNP', 'RB', 'FRAG',
           'WHADVP', 'NX', 'PRT', 'VBZ', 'VBP', 'MD', 'NN', 'WHPP', 'SQ', 'SBARQ', 'LST', 'INTJ', 'X', 'UCP', 'CONJP', 'NNP', 'CD', 'JJ',
           'VBD', 'WHADJP', 'PRP', 'RRC', 'NNS', 'SYM', 'CC']

openai.api_key = " "

# determinant vs. determiner
# https://wikidiff.com/determiner/determinant
ents_prompt = ['Noun','Verb','Adjective','Adverb','Preposition/Subord','Coordinating Conjunction',# 'Cardinal Number',
               'Determiner',
               'Noun Phrase','Verb Phrase','Adjective Phrase','Adverb Phrase','Preposition Phrase','Conjunction Phrase','Coordinate Phrase','Quantitave Phrase','Complex Nominal',
               'Clause','Dependent Clause','Fragment Clause','T-unit','Complex T-unit',# 'Fragment T-unit',
               ][7:]
ents = ['NN', 'VB', 'JJ', 'RB', 'IN', 'CC', 'DT', 'NP', 'VP', 'ADJP', 'ADVP', 'PP', 'CONJP', 'CP', 'QP', 'CN', 'C', 'DC', 'FC', 'T', 'CT'][7:]


ents_prompt_uni_tags = ['Verb', 'Noun', 'Pronoun', 'Adjective', 'Adverb', 'Preposition and Postposition', 'Coordinating Conjunction',
                        'Determiner', 'Cardinal Number', 'Particles or other function words',
                        'Words that cannot be assigned a POS tag', 'Punctuation']

ents = uni_tags + ents
ents_prompt = ents_prompt_uni_tags + ents_prompt

for i, j in zip(ents, ents_prompt):
    print(i, j)

model_mapping = {
    'gpt3.5': 'gpt2',
    #'vicuna-7b': 'lmsys/vicuna-7b-v1.3',
    #'llama-7b': './llama/hf/7B',
}

with open('sample_uniform_1k_2.txt', 'r') as f:
    selected_idx = f.readlines()
selected_idx = [int(i.strip()) for i in selected_idx]#[s:e]

ptb = []
with open('ptb.jsonl', 'r') as f:
    for l in f:
        ptb.append(json.loads(l))


## Prompt 1
template_all = '''Please output the <Noun, Verb, Adjective, Adverb, Preposition/Subord, Coordinating Conjunction, Cardinal Number, Determiner, Noun Phrase, Verb Phrase, Adjective Phrase, Adverb Phrase, Preposition Phrase, Conjunction Phrase, Coordinate Phrase, Quantitave Phrase, Complex Nominal, Clause, Dependent Clause, Fragment Clause, T-unit, Complex T-unit, Fragment T-unit> in the following sentence without any additional text in json format: "{}"'''
template_single = '''Please output any <{}> in the following sentence one per line without any additional text: "{}"'''

## Prompt 2
prompt2_pos = '''Please pos tag the following sentence using Universal POS tag set without generating any additional text: {}'''
prompt2_chunk = '''Please do sentence chunking for the following sentence as in CoNLL 2000 shared task without generating any addtional text: {}'''
prompt2_parse = '''Generate textual representation of the constituency parse tree of the following sentence using Penn TreeBank tag set without outputing any additional text: {}'''

prompt2_chunk = '''Please chunk the following sentence in CoNLL 2000 format with BIO tags without outputing any additional text: {}'''

## Prompt 3
with open('demonstration_3_42_pos.txt', 'r') as f:
    demon_pos = f.read()
with open('demonstration_3_42_chunk.txt', 'r') as f:
    demon_chunk = f.read()
with open('demonstration_3_42_parse.txt', 'r') as f:
    demon_parse = f.read()

# Your existing code
theme = gr.themes.Soft()

# issue get request for gpt 3.5
gpt_pipeline = pipeline(task="text2text-generation", model="gpt2")
#vicuna7b_pipeline = pipeline(task="text2text-generation", model="lmsys/vicuna-7b-v1.3")
#llama7b_pipeline = pipeline(task="text2text-generation", model="./llama/hf/7B")

# Dropdown options for model and task
model_options = list(model_mapping.keys())
task_options = ['POS', 'Chunking'] # remove parsing


# Function to process text based on model and task
def process_text(model_name, task, text):
    gid_list = selected_idx[0:20]

    for gid in tqdm(gid_list, desc='Query'):
        text = ptb[gid]['text']

        if model_name == 'vicuna-7b':
            if task == 'POS':
                strategy1_format = template_all.format(text)
                strategy2_format = prompt2_pos.format(text)
                strategy3_format = demon_pos

                result1 = gpt_pipeline(strategy1_format)[0]['generated_text']
                result2 = gpt_pipeline(strategy2_format)[0]['generated_text']
                result3 = gpt_pipeline(strategy3_format)[0]['generated_text']
                return (result1, result2, result3)
            elif task == 'Chunking':
                strategy1_format = template_all.format(text)
                strategy2_format = prompt2_chunk.format(text)
                strategy3_format = demon_chunk

                result1 = gpt_pipeline(strategy1_format)[0]['generated_text']
                result2 = gpt_pipeline(strategy2_format)[0]['generated_text']
                result3 = gpt_pipeline(strategy3_format)[0]['generated_text']
                return (result1, result2, result3)

# Gradio interface
iface = gr.Interface(
    fn=process_text,
    inputs=[
        gr.Dropdown(model_options, label="Select Model"),
        gr.Dropdown(task_options, label="Select Task"),
        gr.Textbox(label="Input Text", placeholder="Enter the text to process..."),
    ],
    outputs=[
        gr.Textbox(label="Strategy 1 QA Result"),
        gr.Textbox(label="Strategy 2 Instruction Result"),
        gr.Textbox(label="Strategy 3 Structured Prompting Result"),
    ],
    title = "LLM Evaluator For Linguistic Scrutiny",
    theme = theme,
    live=False,
)

iface.launch()
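Both deleted scripts build their generation backend with pipeline(task="text2text-generation", model="gpt2"). As a point of reference, and as an assumption for illustration rather than code from this repository, gpt2 is a decoder-only model that is normally served through the "text-generation" task; a minimal sketch of that variant with one of the prompts used above:

from transformers import pipeline

# gpt2 is a causal (decoder-only) LM, so the matching pipeline task is
# "text-generation"; "text2text-generation" targets encoder-decoder models
# such as T5 or BART.
gpt_pipeline = pipeline(task="text-generation", model="gpt2")

prompt = 'Please pos tag the following sentence using Universal POS tag set: "The cat sat on the mat."'
result = gpt_pipeline(prompt, max_new_tokens=64)[0]['generated_text']
print(result)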
sample_uniform_1k_2.txt
DELETED
@@ -1,1000 +0,0 @@
The diff for this file is too large to render here: it removed 1,000 sampled sentence indices, one integer per line (beginning 22, 40, 199, 444, 457, 669, 679, 1215, ... and ending 251018, 251230, 251240). See raw diff.
test3.py
DELETED
@@ -1,30 +0,0 @@
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

vicuna_model = AutoModelForCausalLM.from_pretrained("lmsys/vicuna-7b-v1.3")
vicuna_tokenizer = AutoTokenizer.from_pretrained("lmsys/vicuna-7b-v1.3")

# llama_model = AutoModelForCausalLM.from_pretrained("luodian/llama-7b-hf")
# llama_tokenizer = AutoTokenizer.from_pretrained("luodian/llama-7b-hf")

# Define the function for generating responses
def generate_response(model, tokenizer, prompt):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_length=500, pad_token_id=tokenizer.eos_token_id)
    response = tokenizer.decode(outputs[0])
    return response

# Define the Gradio interface
def chatbot_interface(prompt):
    vicuna_response = generate_response(vicuna_model, vicuna_tokenizer, prompt)
    # llama_response = generate_response(llama_model, llama_tokenizer, prompt)

    return {"Vicuna-7B": vicuna_response}

iface = gr.Interface(fn=chatbot_interface,
                     inputs="text",
                     outputs="text",
                     interpretation="default",
                     title="Chatbot with Three Models")

iface.launch()