PeterPinetree committed on
Commit
d93e1b9
·
verified ·
1 Parent(s): adcd39d

Update index.html

Browse files
Files changed (1) hide show
  1. index.html +37 -22
index.html CHANGED
@@ -146,37 +146,52 @@
146
  return;
147
  }
148
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
149
  status('Tokenizing…');
150
  try {
151
- // 1) Always get IDs from encode
152
- const enc = await tokenizer.encode(text, { add_special_tokens: false });
153
- const ids =
154
- (enc && (enc.ids ?? enc.input_ids ?? enc.inputIds)) || [];
155
 
156
- // 2) Derive token strings in a version-tolerant way
157
- let tokens = [];
 
158
 
159
- // a) Preferred: convert_ids_to_tokens exists on many tokenizers
 
160
  if (typeof tokenizer.convert_ids_to_tokens === 'function') {
161
- tokens = tokenizer.convert_ids_to_tokens(ids);
162
- }
163
- // b) Fallback: id_to_token per-id
164
- else if (typeof tokenizer.id_to_token === 'function') {
165
- tokens = ids.map(id => tokenizer.id_to_token(id));
166
- }
167
- // c) Some builds include enc.tokens
168
- else if (Array.isArray(enc.tokens)) {
169
- tokens = enc.tokens;
170
- }
171
- // d) Last resort: stringify IDs (shouldn’t happen, but keeps UI stable)
172
- else {
173
- tokens = ids.map(String);
174
  }
175
 
176
- if (myRun !== runId) return; // drop stale results
177
 
178
  state.tokens = Array.isArray(tokens) ? tokens : [];
179
- state.ids = Array.isArray(ids) ? ids : [];
180
 
181
  render();
182
  status(`Done. ${state.tokens.length} tokens.`);
 
146
  return;
147
  }
148
 
149
+ async function tokenize(){
150
+ const myRun = ++runId;
151
+
152
+ if (!tokenizer) {
153
+ await loadTokenizer(modelSel.value);
154
+ if (!tokenizer) { render(); return; }
155
+ }
156
+
157
+ // Make sure we always pass a string to encode()
158
+ const text = String(inputEl.value ?? '').trim();
159
+ if (!text) {
160
+ state.tokens = [];
161
+ state.ids = [];
162
+ render();
163
+ status('Type to tokenize…');
164
+ return;
165
+ }
166
+
167
  status('Tokenizing…');
168
  try {
169
+ // 1) Get IDs (no options arg!)
170
+ const enc = await tokenizer.encode(text);
171
+ // robustly pluck ids out of whatever shape the lib returns
172
+ let ids = (enc && (enc.ids ?? enc.input_ids ?? enc.inputIds)) || [];
173
 
174
+ // 2) Drop special tokens (e.g., BOS/EOS) for the demo
175
+ const specials = new Set(tokenizer.all_special_ids || []);
176
+ const idsNoSpecials = ids.filter(id => !specials.has(id));
177
 
178
+ // 3) Turn IDs into token strings
179
+ let tokens = [];
180
  if (typeof tokenizer.convert_ids_to_tokens === 'function') {
181
+ tokens = tokenizer.convert_ids_to_tokens(idsNoSpecials);
182
+ } else if (typeof tokenizer.id_to_token === 'function') {
183
+ tokens = idsNoSpecials.map(id => tokenizer.id_to_token(id));
184
+ } else if (Array.isArray(enc.tokens)) {
185
+ // fallback: some builds expose tokens directly (may include specials)
186
+ tokens = enc.tokens.filter((_, i) => !specials.has(ids[i]));
187
+ } else {
188
+ tokens = idsNoSpecials.map(String);
 
 
 
 
 
189
  }
190
 
191
+ if (myRun !== runId) return; // drop stale result
192
 
193
  state.tokens = Array.isArray(tokens) ? tokens : [];
194
+ state.ids = Array.isArray(idsNoSpecials) ? idsNoSpecials : [];
195
 
196
  render();
197
  status(`Done. ${state.tokens.length} tokens.`);