jwkirchenbauer committed on
Commit
d2a37a0
·
1 Parent(s): 858fe91

add catch for ignoring the token highlighting

Browse files
Files changed (1) hide show
  1. demo_watermark.py +5 -0
demo_watermark.py CHANGED
@@ -408,6 +408,11 @@ def detect(input_text, args, tokenizer, device=None, return_green_token_mask=Tru
408
  output = [["Error","string too short to compute metrics"]]
409
  output += [["",""] for _ in range(6)]
410
 
 
 
 
 
 
411
  html_output = ""
412
  if green_token_mask is not None:
413
  # hack bc we need a fast tokenizer with charspan support
 
408
  output = [["Error","string too short to compute metrics"]]
409
  output += [["",""] for _ in range(6)]
410
 
411
+ # for now, just don't display the green token mask
412
+ # if we're using normalizers or ignore_repeated_bigrams
413
+ if args.normalizers != [] or args.ignore_repeated_bigrams:
414
+ green_token_mask = None
415
+
416
  html_output = ""
417
  if green_token_mask is not None:
418
  # hack bc we need a fast tokenizer with charspan support