Jingya (HF Staff) committed
Commit c59d05b · verified · 1 parent: c3a15ad

Upload transformer/config.json with huggingface_hub
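For context, a minimal sketch of how a file like this is typically pushed with huggingface_hub, matching the commit message above. The repo ID is a placeholder, since the repository name is not shown in this commit view:

```python
from huggingface_hub import HfApi

api = HfApi()  # relies on a cached token from `huggingface-cli login` or the HF_TOKEN env var

api.upload_file(
    path_or_fileobj="transformer/config.json",   # local file to push
    path_in_repo="transformer/config.json",      # destination path inside the repo
    repo_id="<namespace>/<repo>",                # placeholder; not taken from this commit
    commit_message="Upload transformer/config.json with huggingface_hub",
)
```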

Files changed (1)
transformer/config.json +57 -0
transformer/config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "_class_name": "FluxTransformer2DModel",
+   "_commit_hash": null,
+   "_diffusers_version": "0.35.0.dev0",
+   "attention_head_dim": 128,
+   "axes_dims_rope": [
+     16,
+     56,
+     56
+   ],
+   "guidance_embeds": true,
+   "in_channels": 64,
+   "joint_attention_dim": 4096,
+   "neuron": {
+     "auto_cast": null,
+     "auto_cast_type": null,
+     "compiler_type": "neuronx-cc",
+     "compiler_version": "2.19.8089.0+8ab9f450",
+     "dynamic_batch_size": false,
+     "float_dtype": "bf16",
+     "inline_weights_to_neff": false,
+     "input_names": [
+       "hidden_states",
+       "encoder_hidden_states",
+       "pooled_projections",
+       "timestep",
+       "image_rotary_emb",
+       "guidance"
+     ],
+     "int_dtype": "int64",
+     "model_type": "flux-transformer-2d",
+     "optlevel": "2",
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_names": [
+       "out_hidden_states"
+     ],
+     "static_batch_size": 1,
+     "static_encoder_hidden_size": 768,
+     "static_height": 128,
+     "static_num_channels": 64,
+     "static_patch_size": 1,
+     "static_rotary_axes_dim": 128,
+     "static_sequence_length": 512,
+     "static_vae_scale_factor": 8,
+     "static_width": 128,
+     "task": "semantic-segmentation",
+     "tensor_parallel_size": 8
+   },
+   "num_attention_heads": 24,
+   "num_layers": 19,
+   "num_single_layers": 38,
+   "out_channels": null,
+   "patch_size": 1,
+   "pooled_projection_dim": 768,
+   "transformers_version": null
+ }
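The "neuron" block records how the graph was compiled: static latent shapes (128×128 latents, which at a VAE scale factor of 8 correspond to 1024×1024 images), bf16 compute, and tensor parallelism across 8 NeuronCores. A minimal sketch for fetching and inspecting the file, again with a placeholder repo ID since the repository name is not shown in this commit view:

```python
import json

from huggingface_hub import hf_hub_download

# Placeholder repo ID; substitute the repository this commit belongs to.
config_path = hf_hub_download(
    repo_id="<namespace>/<repo>",
    filename="config.json",
    subfolder="transformer",
)

with open(config_path) as f:
    config = json.load(f)

neuron = config["neuron"]
# Inputs must match these compile-time shapes exactly, since dynamic_batch_size is false.
print(neuron["static_batch_size"], neuron["static_height"], neuron["static_width"])
print(neuron["float_dtype"], "across", neuron["tensor_parallel_size"], "NeuronCores")
```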