models:
  - model: mistralai/Mistral-7B-v0.1
    # No parameters necessary for base model
  - model: DiscoResearch/DiscoLM_German_7b_v1
    parameters:
      density: 0.6
      weight: 0.2
  - model: DRXD1000/Phoenix
    parameters:
      density: 0.6
      weight: 0.2
  - model: LeoLM/leo-mistral-hessianai-7b-chat
    parameters:
      density: 0.6
      weight: 0.1
  - model: openaccess-ai-collective/DPOpenHermes-7B-v2
    parameters:
      density: 0.6
      weight: 0.2
  - model: fblgit/una-cybertron-7b-v2-bf16
    parameters:
      density: 0.6
      weight: 0.2
  - model: mlabonne/NeuralHermes-2.5-Mistral-7B
    parameters:
      density: 0.6
      weight: 0.1
merge_method: dare_ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: true
dtype: bfloat16
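For reference, here is a minimal sketch of how a config like this can be executed with mergekit's Python interface; the file name `dare_ties_config.yaml`, the output directory, and the option values are assumptions, and the `mergekit-yaml` CLI on the same file is an equivalent route.

```python
# Sketch: run the DARE-TIES merge defined in the YAML above with mergekit.
# Paths and option values below are placeholders; adjust to your setup.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "dare_ties_config.yaml"  # assumed filename for the config above
OUTPUT_PATH = "./merged-model"         # assumed output directory

# Parse the YAML file into mergekit's configuration object.
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; the merged weights and tokenizer land in OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

Note that the six donor models' weights sum to 1.0 (0.2 + 0.2 + 0.1 + 0.2 + 0.2 + 0.1), with a shared density of 0.6, so roughly 60% of each model's delta parameters are retained before rescaling.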