yujiepan committed
Commit decbd59 · verified · 1 Parent(s): 73a5767

Upload folder using huggingface_hub

Files changed (2):
  1. README.md +1 -1
  2. config.json +1 -1
README.md CHANGED
@@ -30,7 +30,7 @@ config.intermediate_size = 16
 config.num_attention_heads = 4
 config.num_hidden_layers = 16
 config.num_key_value_heads = 2
-config.use_mamba_kernels = False
+# config.use_mamba_kernels = False
 
 model = AutoModelForCausalLM.from_config(
     config, torch_dtype=torch.bfloat16, attn_implementation="sdpa", trust_remote_code=True
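The README change comments the override out rather than flipping it to True: with the line gone, the config keeps its default (`use_mamba_kernels=True` for Jamba-style configs), so the fused kernels are used whenever the optional packages are installed. A minimal sketch for checking that availability up front, using transformers' import-utility helpers; the check itself is an illustration, not part of this commit:

```python
# Sketch: decide whether the optional mamba kernels can actually be used.
# is_mamba_ssm_available / is_causal_conv1d_available are transformers'
# import-time checks for the mamba-ssm and causal-conv1d packages.
from transformers.utils.import_utils import (
    is_causal_conv1d_available,
    is_mamba_ssm_available,
)

kernels_available = is_mamba_ssm_available() and is_causal_conv1d_available()
print(f"mamba kernels available: {kernels_available}")
```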
config.json CHANGED
@@ -40,6 +40,6 @@
 "torch_dtype": "bfloat16",
 "transformers_version": "4.44.0",
 "use_cache": true,
-"use_mamba_kernels": false,
+"use_mamba_kernels": true,
 "vocab_size": 65536
 }
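The config.json side of the commit flips the shipped value to `"use_mamba_kernels": true`. On a machine without the `mamba-ssm` / `causal-conv1d` packages, the flag can still be overridden at load time, since `from_pretrained` forwards unrecognized keyword arguments to the config. A minimal sketch; the repo id is a placeholder, not taken from this commit:

```python
import torch
from transformers import AutoModelForCausalLM

# Sketch: force the pure-PyTorch mamba path when the fused kernels are
# not installed. "yujiepan/tiny-random-model" is a placeholder repo id.
model = AutoModelForCausalLM.from_pretrained(
    "yujiepan/tiny-random-model",  # placeholder (assumption)
    torch_dtype=torch.bfloat16,
    use_mamba_kernels=False,       # overrides the value in config.json
    trust_remote_code=True,
)
```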