# djinn / mergekit_config.yml
merge_method: linear
parameters:
  weight: 1.0
slices:
  - sources:
      - model: CultriX/NeuralTrix-7B-dpo # embed_tokens comes along for the ride with whatever is the first layer
        layer_range: [0, 1]
      - model: paulml/DPOB-INMTOB-7B # add a dummy second model with weight 0 so the tokenizer-based merge routine is invoked for embed_tokens
        layer_range: [0, 1]
        parameters:
          weight: 0
  - sources:
      - model: cognitivecomputations/dolphin-2.1-mistral-7b
        layer_range: [0, 8]
  - sources:
      - model: bardsai/jaskier-7b-dpo-v5.6
        layer_range: [8, 16]
  - sources:
      - model: paulml/OGNO-7B
        layer_range: [16, 24]
  - sources:
      - model: argilla/distilabeled-OpenHermes-2.5-Mistral-7B
        layer_range: [24, 31]
  - sources: # same as above, but for lm_head with the last layer
      - model: CultriX/NeuralTrix-7B-dpo
        layer_range: [31, 32]
      - model: paulml/DPOB-INMTOB-7B
        layer_range: [31, 32]
        parameters:
          weight: 0
dtype: float16
tokenizer_source: model:cognitivecomputations/dolphin-2.1-mistral-7b
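# A minimal sketch of running this config, assuming mergekit's documented Python
# entry points (MergeConfiguration, MergeOptions, run_merge); the output path
# "./djinn" and the option values are illustrative, not part of this config.
# Kept as comments so this file remains valid YAML.
#
#   import yaml
#   from mergekit.config import MergeConfiguration
#   from mergekit.merge import MergeOptions, run_merge
#
#   # Parse this file into a validated merge configuration.
#   with open("mergekit_config.yml", encoding="utf-8") as fp:
#       merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))
#
#   # Execute the merge; tokenizer handling follows tokenizer_source above.
#   run_merge(
#       merge_config,
#       out_path="./djinn",
#       options=MergeOptions(cuda=True, copy_tokenizer=True, lazy_unpickle=False),
#   )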