{
"_class_name": "Flux2Transformer2DModel",
"_diffusers_version": "0.37.0",
"_name_or_path": "/root/.cache/huggingface/hub/models--black-forest-labs--FLUX.2-dev/snapshots/26afe3a78bb242c0a8bb181dcc8937bb16e5c66c/transformer",
"attention_head_dim": 128,
"axes_dims_rope": [
32,
32,
32,
32
],
"eps": 1e-06,
"guidance_embeds": true,
"in_channels": 128,
"joint_attention_dim": 15360,
"mlp_ratio": 3.0,
"num_attention_heads": 48,
"num_layers": 8,
"num_single_layers": 48,
"out_channels": null,
"patch_size": 1,
"rope_theta": 2000,
"timestep_guidance_channels": 256,
"quantization_config": {
"config_groups": {
"group_0": {
"input_activations": {
"dynamic": false,
"num_bits": 8,
"type": "float"
},
"weights": {
"dynamic": false,
"num_bits": 8,
"type": "float"
},
"targets": [
"Linear"
]
}
},
"ignore": [],
"producer": {
"name": "modelopt",
"version": "0.42.0"
},
"quant_algo": "FP8",
"quant_method": "modelopt"
}
}