{
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.32.0.dev0",
  "_name_or_path": "black-forest-labs/FLUX.1-Depth-dev",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": true,
  "in_channels": 128,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 19,
  "num_single_layers": 38,
  "out_channels": 64,
  "patch_size": 1,
  "pooled_projection_dim": 768,
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "nf4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  }
}
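
A minimal sketch of how the quantization_config block above maps onto diffusers' BitsAndBytesConfig when loading the transformer named in _name_or_path. The "transformer" subfolder is an assumption about the standard diffusers repo layout, not something stated in this file.

import torch
from diffusers import BitsAndBytesConfig, FluxTransformer2DModel

# Mirrors the quantization_config above: 4-bit NF4 weights,
# bfloat16 compute, no double quantization.
quant_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,
    bnb_4bit_use_double_quant=False,
)

# subfolder="transformer" is an assumption about the repo layout.
transformer = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.1-Depth-dev",
    subfolder="transformer",
    quantization_config=quant_config,
    torch_dtype=torch.bfloat16,
)

A checkpoint saved with save_pretrained after loading this way embeds the same quantization_config in its config.json, which is presumably how the block above ended up in this file; such a checkpoint can be reloaded without passing quantization_config again.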