rwightman HF staff committed on
Commit
f60316c
1 Parent(s): 9789e89
Files changed (1) hide show
  1. config.json +5 -5
config.json CHANGED
@@ -3,6 +3,11 @@
3
  "num_classes": 1000,
4
  "num_features": 768,
5
  "global_pool": "avg",
 
 
 
 
 
6
  "pretrained_cfg": {
7
  "tag": "laion2b_ft_in12k_in1k",
8
  "custom_load": false,
@@ -29,10 +34,5 @@
29
  "pool_size": null,
30
  "first_conv": "patch_embed.proj",
31
  "classifier": "head"
32
- },
33
- "model_args": {
34
- "global_pool": "avg",
35
- "act_layer": "silu",
36
- "fc_norm": false
37
  }
38
  }
 
3
  "num_classes": 1000,
4
  "num_features": 768,
5
  "global_pool": "avg",
6
+ "model_args": {
7
+ "global_pool": "avg",
8
+ "act_layer": "silu",
9
+ "fc_norm": false
10
+ },
11
  "pretrained_cfg": {
12
  "tag": "laion2b_ft_in12k_in1k",
13
  "custom_load": false,
 
34
  "pool_size": null,
35
  "first_conv": "patch_embed.proj",
36
  "classifier": "head"
 
 
 
 
 
37
  }
38
  }