# vall-e/loras/config.lora[delamain].yaml
models:
- name: "ar+nar"
  size: "full"
  resp_levels: 8
  tasks: 9
  langs: 4
  tones: 1
  arch_type: llama
  attention: auto
  version: 5
  capabilities: ["ar", "nar"]
  experimental:
    split_classifiers: True
    audio_embedding_sums: True
    unified_position_ids: False
    rvq_levels_p: [
      0, 0, 0, 0, 0, 0, 0,
      1, 2, 3, 4, 5, 6, 7
    ]
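
# Note on `rvq_levels_p`: assuming the trainer draws the RVQ level to train on
# uniformly from the pool above (an assumption about how this list is consumed),
# level 0, the AR level, appears 7 times out of 14 entries and is therefore
# picked about 50% of the time, while each NAR level (1-7) is picked about 7%
# of the time.
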
loras:
- name: "lora-cyberpunk-delamain"
  rank: 128
  alpha: 128
  rvq_levels: []
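
# The LoRA above uses rank 128 with alpha 128, i.e. an effective scale of
# alpha / rank = 128 / 128 = 1.0; `rvq_levels: []` presumably leaves the adapter
# unrestricted rather than limiting it to specific quantizer levels.
# A minimal, commented-out sketch of an entry that does restrict the levels,
# reusing only the keys shown above; the name and the level list are hypothetical:
#- name: "lora-example-nar-only"
#  rank: 128
#  alpha: 128
#  rvq_levels: [1, 2, 3, 4, 5, 6, 7]  # adapt only the NAR levels (assumption)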