AlekseyCalvin committed on
Commit
7eae487
·
verified ·
1 Parent(s): 8a7c9da

Update transformer/config.json

Browse files
Files changed (1) hide show
  1. transformer/config.json +20 -14
transformer/config.json CHANGED
@@ -1,14 +1,20 @@
1
- {
2
- "_class_name": "FluxTransformer2DModel",
3
- "_diffusers_version": "0.30.0.dev0",
4
- "_name_or_path": "../checkpoints/flux-dev/transformer",
5
- "attention_head_dim": 128,
6
- "guidance_embeds": true,
7
- "in_channels": 64,
8
- "joint_attention_dim": 4096,
9
- "num_attention_heads": 24,
10
- "num_layers": 19,
11
- "num_single_layers": 38,
12
- "patch_size": 1,
13
- "pooled_projection_dim": 768
14
- }
 
 
 
 
 
 
 
1
+ {
2
+ "_class_name": "FluxTransformer2DModel",
3
+ "_diffusers_version": "0.34.0.dev0",
4
+ "_name_or_path": "../checkpoints/flux-dev/transformer",
5
+ "attention_head_dim": 128,
6
+ "axes_dims_rope": [
7
+ 16,
8
+ 56,
9
+ 56
10
+ ],
11
+ "guidance_embeds": true,
12
+ "in_channels": 64,
13
+ "joint_attention_dim": 4096,
14
+ "num_attention_heads": 24,
15
+ "num_layers": 19,
16
+ "num_single_layers": 38,
17
+ "out_channels": null,
18
+ "patch_size": 1,
19
+ "pooled_projection_dim": 768
20
+ }