h1t committed
Commit 7c9c1f9
1 Parent(s): da9a302

Upload folder using huggingface_hub
.DS_Store ADDED
Binary file (8.2 kB)
 
README.md ADDED
@@ -0,0 +1,3 @@
+---
+license: mit
+---
model_index.json ADDED
@@ -0,0 +1,16 @@
+{
+  "_class_name": "OMSPipeline",
+  "_diffusers_version": "0.19.3",
+  "oms_module": [
+    "diffusion.diffusers_patch.unet_2d_condition_woct",
+    "UNet2DConditionWoCTModel"
+  ],
+  "oms_text_encoder": [
+    "transformers",
+    "CLIPTextModel"
+  ],
+  "oms_tokenizer": [
+    "transformers",
+    "CLIPTokenizer"
+  ]
+}
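model_index.json wires the pipeline together: `OMSPipeline` and the `diffusion.diffusers_patch` module path are custom code, not stock diffusers components, so loading requires the code shipped with the repo. A minimal sketch, assuming the repo id `h1t/oms_b_openclip_xl` (inferred from `_name_or_path` in oms_module/config.json below) and that `trust_remote_code` resolves the custom pipeline class:

import torch
from diffusers import DiffusionPipeline

# Hypothetical loading sketch; the exact entry point for OMSPipeline
# may differ from what the repo's own instructions prescribe.
pipe = DiffusionPipeline.from_pretrained(
    "h1t/oms_b_openclip_xl",    # repo id inferred from the configs in this commit
    trust_remote_code=True,     # OMSPipeline is not a built-in diffusers class
    torch_dtype=torch.float16,  # this commit uploads fp16 weights
)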
oms_module/config.json ADDED
@@ -0,0 +1,50 @@
+{
+  "_class_name": "UNet2DConditionWoCTModel",
+  "_diffusers_version": "0.19.3",
+  "_name_or_path": "h1t/oms_b_openclip_xl/oms_module",
+  "act_fn": "silu",
+  "attention_head_dim": [
+    5,
+    10,
+    20
+  ],
+  "block_out_channels": [
+    160,
+    320,
+    640
+  ],
+  "center_input_sample": false,
+  "conv_in_kernel": 3,
+  "conv_out_kernel": 3,
+  "cross_attention_dim": 1024,
+  "cross_attention_norm": null,
+  "down_block_types": [
+    "DownBlock2D",
+    "CrossAttnDownBlock2D",
+    "CrossAttnDownBlock2D"
+  ],
+  "downsample_padding": 1,
+  "dual_cross_attention": false,
+  "encoder_hid_dim": null,
+  "encoder_hid_dim_type": null,
+  "in_channels": 4,
+  "layers_per_block": 2,
+  "mid_block_only_cross_attention": null,
+  "mid_block_scale_factor": 1,
+  "mid_block_type": "UNetMidBlock2DCrossAttn",
+  "norm_eps": 1e-05,
+  "norm_num_groups": 32,
+  "num_attention_heads": null,
+  "only_cross_attention": false,
+  "out_channels": 4,
+  "resnet_out_scale_factor": 1.0,
+  "sample_size": null,
+  "transformer_layers_per_block": 1,
+  "up_block_types": [
+    "CrossAttnUpBlock2D",
+    "CrossAttnUpBlock2D",
+    "UpBlock2D"
+  ],
+  "upcast_attention": false,
+  "use_linear_projection": false
+}
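Several of these fields have to agree with the other configs in this commit; in particular `cross_attention_dim` (1024) must equal the text encoder's `hidden_size` below, since prompt embeddings feed the UNet's cross-attention layers. A small sanity-check sketch over local copies of the two JSON files (file paths are illustrative):

import json

with open("oms_module/config.json") as f:
    unet_cfg = json.load(f)
with open("oms_text_encoder/config.json") as f:
    text_cfg = json.load(f)

# Cross-attention width must match the text encoder's embedding width.
assert unet_cfg["cross_attention_dim"] == text_cfg["hidden_size"]  # 1024 == 1024

# Per-level attention layout: block_out_channels[i] / attention_head_dim[i]
# is constant across the three levels (160/5 == 320/10 == 640/20 == 32).
for d, c in zip(unet_cfg["attention_head_dim"], unet_cfg["block_out_channels"]):
    print(f"{c} channels / {d} = {c // d}")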
oms_module/diffusion_pytorch_model.fp16.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:512a4b23067a064f97e6350c09d684082c371d4c347f1506493c2f273497f675
+size 308083816
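The three lines above are a Git LFS pointer, not the weights themselves: the pointer records only the SHA-256 oid and the byte size (308083816 bytes, about 308 MB). A sketch of fetching the actual file with huggingface_hub, again assuming the repo id inferred from `_name_or_path`:

from huggingface_hub import hf_hub_download

# Resolves the LFS pointer and downloads the real safetensors file
# into the local Hugging Face cache, returning its path.
path = hf_hub_download(
    repo_id="h1t/oms_b_openclip_xl",
    filename="oms_module/diffusion_pytorch_model.fp16.safetensors",
)
print(path)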
oms_text_encoder/config.json ADDED
@@ -0,0 +1,25 @@
+{
+  "_name_or_path": "stabilityai/stable-diffusion-2-1/text_encoder",
+  "architectures": [
+    "CLIPTextModel"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "dropout": 0.0,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_size": 1024,
+  "initializer_factor": 1.0,
+  "initializer_range": 0.02,
+  "intermediate_size": 4096,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 77,
+  "model_type": "clip_text_model",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 23,
+  "pad_token_id": 1,
+  "projection_dim": 512,
+  "torch_dtype": "float32",
+  "transformers_version": "4.25.0.dev0",
+  "vocab_size": 49408
+}
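Per `_name_or_path`, this config is copied from the stabilityai/stable-diffusion-2-1 text encoder (OpenCLIP-style dimensions: hidden_size 1024, 23 hidden layers). A sketch of loading it on its own, under the same repo-id assumption as above:

from transformers import CLIPTextModel

# subfolder matches the directory added in this commit; whether the
# encoder weights ship in this repo is an assumption of this sketch.
text_encoder = CLIPTextModel.from_pretrained(
    "h1t/oms_b_openclip_xl", subfolder="oms_text_encoder"
)
print(text_encoder.config.hidden_size)  # 1024, matching the UNet's cross_attention_dim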
oms_tokenizer/config.json ADDED
@@ -0,0 +1,34 @@
+{
+  "add_prefix_space": false,
+  "bos_token": {
+    "__type": "AddedToken",
+    "content": "<|startoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "do_lower_case": true,
+  "eos_token": {
+    "__type": "AddedToken",
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "errors": "replace",
+  "model_max_length": 77,
+  "_name_or_path": "stabilityai/stable-diffusion-2-1/tokenizer",
+  "pad_token": "<|endoftext|>",
+  "special_tokens_map_file": "./special_tokens_map.json",
+  "tokenizer_class": "CLIPTokenizer",
+  "unk_token": {
+    "__type": "AddedToken",
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
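The matching tokenizer pads and truncates prompts to `model_max_length` (77), with `<|endoftext|>` doubling as both pad and unk token. A usage sketch under the same repo-id assumption:

from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained(
    "h1t/oms_b_openclip_xl", subfolder="oms_tokenizer"
)
batch = tokenizer(
    "a photo of an astronaut riding a horse",  # example prompt, not from the repo
    padding="max_length",
    max_length=tokenizer.model_max_length,  # 77, from the config above
    truncation=True,
    return_tensors="pt",
)
print(batch.input_ids.shape)  # torch.Size([1, 77])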