Ngit committed
Commit 1bf6be1 · 1 Parent(s): 98ab1b4

Delete ort_config.json

Files changed (1)
  1. ort_config.json +0 -39
ort_config.json DELETED
@@ -1,39 +0,0 @@
-{
-  "one_external_file": true,
-  "opset": null,
-  "optimization": {
-    "disable_attention": null,
-    "disable_attention_fusion": false,
-    "disable_bias_gelu": null,
-    "disable_bias_gelu_fusion": false,
-    "disable_bias_skip_layer_norm": null,
-    "disable_bias_skip_layer_norm_fusion": false,
-    "disable_embed_layer_norm": true,
-    "disable_embed_layer_norm_fusion": true,
-    "disable_gelu": null,
-    "disable_gelu_fusion": false,
-    "disable_group_norm_fusion": true,
-    "disable_layer_norm": null,
-    "disable_layer_norm_fusion": false,
-    "disable_packed_kv": true,
-    "disable_rotary_embeddings": false,
-    "disable_shape_inference": false,
-    "disable_skip_layer_norm": null,
-    "disable_skip_layer_norm_fusion": false,
-    "enable_gelu_approximation": false,
-    "enable_gemm_fast_gelu_fusion": false,
-    "enable_transformers_specific_optimizations": true,
-    "fp16": false,
-    "no_attention_mask": false,
-    "optimization_level": 1,
-    "optimize_for_gpu": false,
-    "optimize_with_onnxruntime_only": null,
-    "use_mask_index": false,
-    "use_multi_head_attention": false,
-    "use_raw_attention_mask": false
-  },
-  "optimum_version": "1.14.1",
-  "quantization": {},
-  "transformers_version": "4.30.0",
-  "use_external_data_format": false
-}
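For context, a file of this shape is what Hugging Face Optimum writes next to an optimized ONNX model (the deleted file records optimum_version 1.14.1). Below is a minimal sketch of how such an ort_config.json is typically produced with Optimum's ORTOptimizer; the model id and save directory are placeholders, not taken from this repository.

# Minimal sketch, assuming the deleted ort_config.json came from Optimum's
# ORTOptimizer. "example/model" and "onnx-optimized" are placeholder names.
from optimum.onnxruntime import ORTModelForFeatureExtraction, ORTOptimizer
from optimum.onnxruntime.configuration import OptimizationConfig

# Export a transformers checkpoint to ONNX.
model = ORTModelForFeatureExtraction.from_pretrained("example/model", export=True)

# optimization_level=1, fp16=False, optimize_for_gpu=False mirror the values
# seen in the deleted config.
optimization_config = OptimizationConfig(
    optimization_level=1,
    fp16=False,
    optimize_for_gpu=False,
)

# optimize() saves the optimized ONNX model into save_dir and writes an
# ort_config.json (the file removed by this commit) alongside it.
optimizer = ORTOptimizer.from_pretrained(model)
optimizer.optimize(save_dir="onnx-optimized", optimization_config=optimization_config)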