hyxmmm committed on
Commit
53d555a
1 Parent(s): f985e26

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +3 -2
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "architectures": [
3
  "AquilaForCausalLM"
4
  ],
@@ -24,8 +25,8 @@
24
  "rope_scaling": null,
25
  "rope_theta": 1000000.0,
26
  "tie_word_embeddings": false,
27
- "torch_dtype": "bfloat16",
28
- "transformers_version": "4.39.3",
29
  "use_cache": true,
30
  "vocab_size": 143973
31
  }
 
1
  {
2
+ "_name_or_path": "aquila2_34b",
3
  "architectures": [
4
  "AquilaForCausalLM"
5
  ],
 
25
  "rope_scaling": null,
26
  "rope_theta": 1000000.0,
27
  "tie_word_embeddings": false,
28
+ "torch_dtype": "float32",
29
+ "transformers_version": "4.42.3",
30
  "use_cache": true,
31
  "vocab_size": 143973
32
  }