Sven Schultze committed
Commit be9256d · 1 Parent(s): bddc7f7

add transformersjs config

Files changed (1)
  1. config.json +8 -1
config.json CHANGED
@@ -29,7 +29,14 @@
     "rope_type": "llama3"
   },
   "rope_theta": 500000.0,
-  "tie_word_embeddings": true,
+  "tie_word_embeddings": true,
+  "transformers.js_config": {
+    "dtype": "q4",
+    "use_external_data_format": {
+      "model.onnx": true,
+      "model_fp16.onnx": true
+    }
+  },
   "torch_dtype": "float32",
   "transformers_version": "4.49.0",
   "use_cache": false,