Create app.py
app.py
ADDED
@@ -0,0 +1,87 @@
import gradio as gr
import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

title = "Protein Token Classification 🧬"
description = "Finds the positions of helices and beta strands in a protein sequence."
article = 'Created by fine-tuning ESM2_150M.'

# The classification head is loaded from the local fine-tuned checkpoint;
# the tokenizer comes from the matching base ESM2-150M model.
model = AutoModelForTokenClassification.from_pretrained('./Model')
tokenizer = AutoTokenizer.from_pretrained('facebook/esm2_t30_150M_UR50D')

example_list = ['MENFTALFGAQADPPPPPTALGFGPGKPPPPPPPPAGGGPGTAPPPTAATAPPGADKSGAGCGPFYLMRELPGSTELTGSTNLITHYNLEQAYNKFCGKKVKEKLSNFLPDLPGMIDLPGSHDNSSLRSLIEKPPILSSSFNPITGTMLAGFRLHTGPLPEQCRLMHIQPPKKKNKHKHKQSRTQDPVPPETPSDSDHKKKKKKKEEDPDRKRKKKEKKKKKNRHSPDHPGMGSSQASSSSSLR',
                'MAFSDLTSRTVHLYDNWIKDADPRVEDWLLMSSPLPQTILLGFYVYFVTSLGPKLMENRKPFELKKAMITYNFFIVLFSVYMCYEFVMSGWGIGYSFRCDIVDYSRSPTALRMARTCWLYYFSKFIELLDTIFFVLRKKNSQVTFLHVFHHTIMPWTWWFGVKFAAGGLGTFHALLNTAVHVVMYSYYGLSALGPAYQKYLWWKKYLTSLQLVQFVIVAIHISQFFFMEDCKYQFPVFACIIMSYSFMFLLLFLHFWYRAYTKGQRLPKTVKNGTCKNKDN',
                'MYPSNKKKKVWREEKERLLKMTLEERRKEYLRDYIPLNSILSWKEEMKGKGQNDEENTQETSQVKKSLTEKVSLYRGDITLLEVDAIVNAANASLLGGGGVDGCIHRAAGPCLLAECRNLNGCDTGHAKITCGYDLPAKYVIHTVGPIARGHINGSHKEDLANCYKSSLKLVKENNIRSVAFPCISTGIYGFPNEPAAVIALNTIKEWLAKNHHEVDRIIFCVFLEVDFKIYKKKMNEFFSVDDNNEEEEDVEMKEDSDENGPEEKQSVEEMEEQSQDADGVNTVTVPGPASEEAVEDCKDEDFAKDENITKGGEVTDHSVRDQDHPDGQENDSTKNEIKIETESQSSYMETEELSSNQEDAVIVEQPEVIPLTEDQEEKEGEKAPGEDTPRMPGKSEGSSDLENTPGPDAGAQDEAKEQRNGTK',
                'MAGQHLPVPRLEGVSREQFMQHLYPQRKPLVLEGIDLGPCTSKWTVDYLSQVGGKKEVKIHVAAVAQMDFISKNFVYRTLPFDQLVQRAAEEKHKEFFVSEDEKYYLRSLGEDPRKDVADIRKQFPLLKGDIKFPEFFKEEQFFSSVFRISSPGLQLWTHYDVMDNLLIQVTGKKRVVLFSPRDAQYLYLKGTKSEVLNIDNPDLAKYPLFSKARRYECSLEAGDVLFIPALWFHNVISEEFGVGVNIFWKHLPSECYDKTDTYGNKDPTAASRAAQILDRALKTLAELPEEYRDFYARRMVLHIQDKAYSKNSE',
                'MEAGPPGSARPAEPGPCLSGQRGADHTASASLQSVAGTEPGRHPQAVAAVLPAGGCGERMGVPTPKQFCPILERPLISYTLQALERVCWIKDIVVAVTGENMEVMKSIIQKYQHKRISLVEAGVTRHRSIFNGLKALAEDQINSKLSKPEVVIIHDAVRPFVEEGVLLKVVTAAKEHGAAGAIRPLVSTVVSPSADGCLDYSLERARHRASEMPQAFLFDVIYEAYQQCSDYDLEFGTECLQLALKYCCTKAKLVEGSPDLWKVTYKRDLYAAESIIKERISQEICVVMDTEEDNKHVGHLLEEVLKSELNHVKVTSEALGHAGRHLQQIILDQCYNFVCVNVTTSDFQETQKLLSMLEESSLCILYPVVVVSVHFLDFKLVPPSQKMENLMQIREFAKEVKERNILLYGLLISYPQDDQKLQESLRQGAIIIASLIKERNSGLIGQLLIA']

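# Optional sanity check: a fine-tuned token-classification checkpoint
# normally stores its label mapping in the config; the exact names depend
# on how the model was trained, so the line below is only illustrative.
# print(model.config.id2label)  # e.g. {0: 'other', 1: 'helix', 2: 'strand'}
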
def count_runs(positions):
    # Collapse a sorted list of 1-based residue positions into inclusive
    # (start, end) spans of consecutive indices. Isolated single positions
    # are not reported.
    final = []
    temp = []
    for x in range(1, len(positions)):
        if positions[x] == positions[x - 1] + 1:
            temp.append(positions[x - 1])
            temp.append(positions[x])
        elif len(temp) != 0:
            final.append((temp[0], temp[-1]))
            temp = []
    if len(temp) != 0:  # flush a run that reaches the end of the list
        final.append((temp[0], temp[-1]))
    return final

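# Illustrative check of the run grouping on toy positions (made-up inputs,
# not taken from the app): consecutive indices collapse into spans, and a
# run that ends the list is kept.
assert count_runs([3, 4, 5, 9, 10]) == [(3, 5), (9, 10)]
assert count_runs([7]) == []           # single residues are not reported
assert count_runs([1, 2]) == [(1, 2)]  # a trailing run is kept
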
def format_runs(positions, not_found_msg):
    # Render the (start, end) spans as a comma-separated string, or return
    # a fallback message when no run was found.
    runs = count_runs(positions)
    if runs:
        return str(runs)[1:-1]
    return not_found_msg

def predict(protein_sequence):
    encoded = tokenizer(protein_sequence, return_tensors='pt')
    with torch.inference_mode():
        outputs = model(**encoded)
    # Per-token class ids; drop the first and last predictions, which
    # correspond to the <cls> and <eos> special tokens, so that index i
    # maps to residue position i + 1.
    preds = outputs.logits.argmax(dim=-1)[0, 1:-1].tolist()

    helix = []
    strand = []
    for i, label in enumerate(preds):
        if label == 1:    # class 1: helix
            helix.append(i + 1)
        elif label != 0:  # any other non-zero class: beta strand
            strand.append(i + 1)

    return (format_runs(helix, 'No helix found.'),
            format_runs(strand, 'No beta strand found.'))

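# Note: the ESM2 tokenizer brackets every sequence with <cls> and <eos>
# special tokens (e.g. tokenizer('MKT')['input_ids'] yields 5 ids for 3
# residues), which is why predict() drops the first and last per-token
# prediction.
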
iface = gr.Interface(fn=predict,
                     inputs=gr.Text(label='Protein Sequence'),
                     outputs=[gr.Text(label='Helix'),
                              gr.Text(label='Beta Strand')],
                     title=title,
                     description=description,
                     article=article,
                     examples=example_list)
iface.launch()
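
# Quick smoke test without the UI (assumes the fine-tuned checkpoint is
# available at './Model'); run in place of iface.launch():
#   helix_spans, strand_spans = predict(example_list[0])
#   print('Helix:', helix_spans)
#   print('Beta strand:', strand_spans)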