{"model": "alpindale/WizardLM-2-8x22B", "base_model": "petra", "revision": "087834da175523cffd66a7e19583725e798c1b4f", "precision": "float16", "params": 140.621, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T19:15:18Z", "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets", "job_id": -1, "job_start_time": null, "use_chat_template": true}