{
  "_class_name": "Transformer2DModel",
  "_diffusers_version": "0.13.0.dev0",
  "activation_fn": "geglu",
  "attention_bias": false,
  "attention_head_dim": 88,
  "cross_attention_dim": null,
  "decay": 0.9999,
  "dropout": 0.0,
  "in_channels": 3,
  "inv_gamma": 1.0,
  "min_decay": 0.0,
  "norm_elementwise_affine": true,
  "norm_num_groups": 3,
  "norm_type": "ada_norm",
  "num_attention_heads": 16,
  "num_embeds_ada_norm": 1000,
  "num_layers": 8,
  "num_vector_embeds": null,
  "only_cross_attention": false,
  "optimization_step": 25500,
  "out_channels": 3,
  "patch_size": 8,
  "power": 0.75,
  "sample_size": 64,
  "upcast_attention": false,
  "update_after_step": 0,
  "use_ema_warmup": true,
  "use_linear_projection": false
}