Bajiyo committed on
Commit
ff2bbdf
·
verified ·
1 Parent(s): 59b75a8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -7,12 +7,12 @@ model = from_pretrained_keras("Bajiyo/ml-en-transliteration")
7
  import json
8
  from keras.preprocessing.sequence import pad_sequences
9
 
10
- # Load tokenizer configurations
11
- source_tokenizer_path = "https://huggingface.co/Bajiyo/ml-en-transliteration/blob/main/source_tokenizer.json"
12
  with open(source_tokenizer_path, "r") as f:
13
  source_tokenizer_config = json.load(f)
14
 
15
- target_tokenizer_path = "https://huggingface.co/Bajiyo/ml-en-transliteration/blob/main/target_tokenizer.json"
16
  with open(target_tokenizer_path, "r") as f:
17
  target_tokenizer_config = json.load(f)
18
 
@@ -21,6 +21,7 @@ from keras.preprocessing.text import tokenizer_from_json
21
  source_tokenizer = tokenizer_from_json(source_tokenizer_config)
22
  target_tokenizer = tokenizer_from_json(target_tokenizer_config)
23
 
 
24
  # Define the maximum sequence length
25
  max_seq_length = 50
26
 
 
7
  import json
8
  from keras.preprocessing.sequence import pad_sequences
9
 
10
+ # Load tokenizer configurations from local files (assuming they are saved locally)
11
+ source_tokenizer_path = "Bajiyo/ml-en-transliteration/source_tokenizer.json" # Replace with actual path
12
  with open(source_tokenizer_path, "r") as f:
13
  source_tokenizer_config = json.load(f)
14
 
15
+ target_tokenizer_path = "Bajiyo/ml-en-transliteration/target_tokenizer.json" # Replace with actual path
16
  with open(target_tokenizer_path, "r") as f:
17
  target_tokenizer_config = json.load(f)
18
 
 
21
  source_tokenizer = tokenizer_from_json(source_tokenizer_config)
22
  target_tokenizer = tokenizer_from_json(target_tokenizer_config)
23
 
24
+
25
  # Define the maximum sequence length
26
  max_seq_length = 50
27