---
license: apache-2.0
base_model:
  - aixonlab/Eurydice-24b-v3.5
  - CrucibleLab/M3.2-24B-Loki-V1.3
  - Darkhn/M3.2-24B-Animus-V7.1
  - Delta-Vector/Rei-24B-KTO
  - Doctor-Shotgun/MS3.2-24B-Magnum-Diamond
  - LatitudeGames/Hearthfire-24B
  - Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
  - PocketDoc/Dans-PersonalityEngine-V1.3.0-24b
  - ReadyArt/Broken-Tutu-24B-Transgression-v2.0
  - SicariusSicariiStuff/Impish_Magic_24B
  - TheDrummer/Magidonia-24B-v4.2.0
  - TheDrummer/Precog-24B-v1
  - trashpanda-org/MS3.2-24B-Mullein-v2
  - Undi95/MistralThinker-v1.1
  - zerofata/MS3.2-PaintedFantasy-v3-24B
language:
  - en
library_name: transformers
tags:
  - merge
  - mergekit
---

> [!CAUTION]
> ⚠️ Warning: This merge is partially broken due to using 2501 and 2503 finetunes; it may have weak prompt adherence.

# Station V

Goetia checkpoint test, a modified expansion of the Circuitry/Rotor series.

```yaml
# mergekit TIES merge: 14 donor models over the Precog-24B base.
# Models with density 0.6 / weight 0.12 contribute more strongly than
# the 0.5 / 0.1 group; weights are intentionally not normalized.
models:
  - model: Delta-Vector/Rei-24B-KTO
    parameters:
      density: 0.5
      weight: 0.1
  - model: TheDrummer/Magidonia-24B-v4.2.0
    parameters:
      density: 0.6
      weight: 0.12
  - model: zerofata/MS3.2-PaintedFantasy-v3-24B
    parameters:
      density: 0.6
      weight: 0.12
  - model: ReadyArt/Broken-Tutu-24B-Transgression-v2.0
    parameters:
      density: 0.6
      weight: 0.12
  - model: LatitudeGames/Hearthfire-24B
    parameters:
      density: 0.6
      weight: 0.12
  - model: CrucibleLab/M3.2-24B-Loki-V1.3
    parameters:
      density: 0.5
      weight: 0.1
  - model: SicariusSicariiStuff/Impish_Magic_24B
    parameters:
      density: 0.5
      weight: 0.1
  - model: aixonlab/Eurydice-24b-v3.5
    parameters:
      density: 0.5
      weight: 0.1
  - model: PocketDoc/Dans-PersonalityEngine-V1.3.0-24b
    parameters:
      density: 0.5
      weight: 0.1
  - model: Darkhn/M3.2-24B-Animus-V7.1
    parameters:
      density: 0.5
      weight: 0.1
  - model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
    parameters:
      density: 0.6
      weight: 0.12
  - model: trashpanda-org/MS3.2-24B-Mullein-v2
    parameters:
      density: 0.5
      weight: 0.1
  - model: Doctor-Shotgun/MS3.2-24B-Magnum-Diamond
    parameters:
      density: 0.5
      weight: 0.1
  - model: Undi95/MistralThinker-v1.1
    parameters:
      density: 0.5
      weight: 0.1
base_model: TheDrummer/Precog-24B-v1
merge_method: ties
parameters:
  normalize: false
  int8_mask: false
dtype: float32
out_dtype: bfloat16
tokenizer:
  source: union
chat_template: auto
name: StationV-24B-v1
```