vall-e / loras /config.lora[johnny].yaml
models:
- name: "ar+nar"
  size: "full"
  resp_levels: 8                  # number of RVQ (codebook) levels the model handles
  tasks: 9
  langs: 4
  tones: 1
  arch_type: llama
  attention: auto
  version: 5
  capabilities: ["ar", "nar"]     # autoregressive + non-autoregressive decoding
  experimental:
    split_classifiers: True
    audio_embedding_sums: True
    unified_position_ids: False
    rvq_levels_p: [               # pool of RVQ levels to train against:
      0, 0, 0, 0, 0, 0, 0,        # level 0 (the AR level) listed seven times,
      1, 2, 3, 4, 5, 6, 7         # levels 1-7 (NAR) listed once each
    ]
loras:
- name: "lora-cyberpunk-silverhand"
  rank: 128                       # LoRA rank (width of the low-rank update)
  alpha: 128                      # LoRA alpha; effective scale = alpha / rank = 1.0
  rvq_levels: []                  # empty list: no restriction on which RVQ levels the LoRA applies to
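For reference, rank and alpha follow the usual LoRA convention, where the low-rank update is scaled by alpha / rank (1.0 for this config). Below is a minimal, generic sketch of that relationship around a single linear layer; it is only an illustration under that assumption, not the project's actual LoRA implementation, and the LoRALinear class name is hypothetical.

# Generic LoRA sketch (hypothetical; not mrq/vall-e's implementation).
# It only illustrates how the rank/alpha pair above behaves: the low-rank
# update B(Ax) is scaled by alpha / rank, so rank=128, alpha=128 -> scale 1.0.
import torch
import torch.nn as nn

class LoRALinear(nn.Module):
    def __init__(self, base: nn.Linear, rank: int = 128, alpha: int = 128):
        super().__init__()
        self.base = base                            # pretrained projection, kept frozen
        self.scale = alpha / rank                   # 128 / 128 = 1.0 for this config
        self.lora_a = nn.Linear(base.in_features, rank, bias=False)
        self.lora_b = nn.Linear(rank, base.out_features, bias=False)
        nn.init.zeros_(self.lora_b.weight)          # adapter starts as a no-op
        for p in self.base.parameters():            # only the adapter is trained
            p.requires_grad_(False)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.base(x) + self.scale * self.lora_b(self.lora_a(x))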