thak123 committed on
Commit
64909f7
·
1 Parent(s): 72c4192

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -19
app.py CHANGED
@@ -13,25 +13,25 @@ import gradio as gr
13
  # T = tokenizer.TweetTokenizer(
14
  # preserve_handles=True, preserve_hashes=True, preserve_case=False, preserve_url=False)
15
 
16
- def preprocess(text):
17
- tokens = T.tokenize(text)
18
- print(tokens, file=sys.stderr)
19
- ptokens = []
20
- for index, token in enumerate(tokens):
21
- if "@" in token:
22
- if index > 0:
23
- # check if previous token was mention
24
- if "@" in tokens[index-1]:
25
- pass
26
- else:
27
- ptokens.append("mention_0")
28
- else:
29
- ptokens.append("mention_0")
30
- else:
31
- ptokens.append(token)
32
-
33
- print(ptokens, file=sys.stderr)
34
- return " ".join(ptokens)
35
 
36
 
37
  def sentence_prediction(sentence):
 
13
  # T = tokenizer.TweetTokenizer(
14
  # preserve_handles=True, preserve_hashes=True, preserve_case=False, preserve_url=False)
15
 
16
+ # def preprocess(text):
17
+ # tokens = T.tokenize(text)
18
+ # print(tokens, file=sys.stderr)
19
+ # ptokens = []
20
+ # for index, token in enumerate(tokens):
21
+ # if "@" in token:
22
+ # if index > 0:
23
+ # # check if previous token was mention
24
+ # if "@" in tokens[index-1]:
25
+ # pass
26
+ # else:
27
+ # ptokens.append("mention_0")
28
+ # else:
29
+ # ptokens.append("mention_0")
30
+ # else:
31
+ # ptokens.append(token)
32
+
33
+ # print(ptokens, file=sys.stderr)
34
+ # return " ".join(ptokens)
35
 
36
 
37
  def sentence_prediction(sentence):