{
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.30.0",
  "_name_or_path": "F:\\om\\2024\\LLM\\converter\\flux16model\\hub\\models--black-forest-labs--FLUX.1-schnell\\snapshots\\741f7c3ce8b383c54771c7003378a50191e9efe9\\transformer",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": false,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 19,
  "num_single_layers": 38,
  "patch_size": 1,
  "pooled_projection_dim": 768
}
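
The fields above map directly onto the FluxTransformer2DModel constructor in diffusers. A minimal sketch of loading the transformer that this config describes; the hub repo id comes from the _name_or_path field, while the bfloat16 dtype is an assumption for illustration:

import torch
from diffusers import FluxTransformer2DModel

# Load the FLUX.1-schnell transformer whose config is shown above.
# torch_dtype=bfloat16 is an assumption, not something stated in the file.
transformer = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.1-schnell",
    subfolder="transformer",
    torch_dtype=torch.bfloat16,
)

# The loaded config mirrors the JSON: 19 dual-stream blocks, 38 single-stream
# blocks, 24 heads of dim 128, and no guidance embedding (guidance_embeds: false).
print(transformer.config.num_layers, transformer.config.num_single_layers)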