anamargarida committed
Commit 341d2b6 · verified · 1 Parent(s): f46e049

Rename app_18.py to app_19.py

Files changed (1)
  1. app_18.py → app_19.py +22 -15
app_18.py → app_19.py RENAMED
@@ -163,25 +163,32 @@ def extract_arguments(text, tokenizer, model, beam_search=True):
    # Add the argument tags in the sentence directly
    def add_tags(original_text, word_ids, start_cause, end_cause, start_effect, end_effect, start_signal, end_signal):
-
        space_splitted_tokens = original_text.split(" ")
-
-
-        if beam_search:
-            if start_cause is not None and end_cause is not None and start_effect is not None and end_effect is not None:
-                this_space_splitted_tokens = copy.deepcopy(space_splitted_tokens)
-                this_space_splitted_tokens[word_ids[start_cause]] = '<ARG0>' + this_space_splitted_tokens[word_ids[start_cause]]
-                this_space_splitted_tokens[word_ids[end_cause]] = this_space_splitted_tokens[word_ids[end_cause]] + '</ARG0>'
-                this_space_splitted_tokens[word_ids[start_effect]] = '<ARG1>' + this_space_splitted_tokens[word_ids[start_effect]]
-                this_space_splitted_tokens[word_ids[end_effect]] = this_space_splitted_tokens[word_ids[end_effect]] + '</ARG1>'
-
-                if has_signal:
-                    this_space_splitted_tokens[word_ids[start_signal]] = '<SIG0>' + this_space_splitted_tokens[word_ids[start_signal]]
-                    this_space_splitted_tokens[word_ids[end_signal]] = this_space_splitted_tokens[word_ids[end_signal]] + '</SIG0>'
+        this_space_splitted_tokens = copy.deepcopy(space_splitted_tokens)

-        # Join the tokens back into a single string
+        def safe_insert(tag, position, start=True):
+            """Safely insert a tag, checking for None values."""
+            if position is not None and word_ids[position] is not None:
+                if start:
+                    this_space_splitted_tokens[word_ids[position]] = tag + this_space_splitted_tokens[word_ids[position]]
+                else:
+                    this_space_splitted_tokens[word_ids[position]] += tag
+
+        # Add argument tags safely
+        safe_insert('<ARG0>', start_cause, start=True)
+        safe_insert('</ARG0>', end_cause, start=False)
+        safe_insert('<ARG1>', start_effect, start=True)
+        safe_insert('</ARG1>', end_effect, start=False)
+
+        # Add signal tags safely (if signal exists)
+        if start_signal is not None and end_signal is not None:
+            safe_insert('<SIG0>', start_signal, start=True)
+            safe_insert('</SIG0>', end_signal, start=False)
+
+        # Join tokens back into a string
        return ' '.join(this_space_splitted_tokens)

+
    # Apply the tags to the sentence tokens
    tagged_sentence1 = add_tags(input_text, word_ids, start_cause1, end_cause1, start_effect1, end_effect1, start_signal, end_signal)
    tagged_sentence2 = add_tags(input_text, word_ids, start_cause2, end_cause2, start_effect2, end_effect2, start_signal, end_signal)
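
For context, a minimal, self-contained sketch of what the refactored tag-insertion logic does, run on a toy sentence with hand-picked word_ids and span indices. None of these values come from the actual tokenizer/model pipeline in app_19.py; they are assumptions chosen only to illustrate how safe_insert attaches the ARG/SIG tags.

# Illustrative sketch only: toy sentence and hypothetical word_ids / span indices.
import copy

original_text = "heavy rain caused the flooding downtown"
# Hypothetical mapping from predicted token positions to space-split word positions.
word_ids = [0, 1, 2, 3, 4, 5]

space_splitted_tokens = original_text.split(" ")
this_space_splitted_tokens = copy.deepcopy(space_splitted_tokens)

def safe_insert(tag, position, start=True):
    """Attach a tag to the mapped word, skipping positions that are None."""
    if position is not None and word_ids[position] is not None:
        if start:
            this_space_splitted_tokens[word_ids[position]] = tag + this_space_splitted_tokens[word_ids[position]]
        else:
            this_space_splitted_tokens[word_ids[position]] += tag

# Hypothetical spans: cause = "heavy rain", signal = "caused", effect = "the flooding".
safe_insert('<ARG0>', 0, start=True)
safe_insert('</ARG0>', 1, start=False)
safe_insert('<ARG1>', 3, start=True)
safe_insert('</ARG1>', 4, start=False)
safe_insert('<SIG0>', 2, start=True)
safe_insert('</SIG0>', 2, start=False)

print(' '.join(this_space_splitted_tokens))
# <ARG0>heavy rain</ARG0> <SIG0>caused</SIG0> <ARG1>the flooding</ARG1> downtown

Because every insertion goes through the None check in safe_insert, a span whose boundary was not predicted is simply left untagged instead of raising an error, which appears to be the point of replacing the earlier nested beam_search/has_signal branches.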