Fix text tokenizer path (#36)

Václav Volhejn 2025-07-03 14:27:06 +02:00 committed by GitHub
parent c6f262346f
commit c1d248abba


@@ -6,7 +6,7 @@ authorized_ids = ["public_token"]
 [modules.tts_py]
 type = "Py"
 path = "/api/tts_streaming"
-text_tokenizer_file = "hf://kyutai/unmute/test_en_fr_audio_8000.model"
+text_tokenizer_file = "hf://kyutai/tts-1.6b-en_fr/tokenizer_spm_8k_en_fr_audio.model"
 batch_size = 8 # Adjust to your GPU memory capacity
 text_bos_token = 1
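
For context, the new value points at the SentencePiece tokenizer file published in the kyutai/tts-1.6b-en_fr repo on the Hugging Face Hub. Below is a minimal sketch for sanity-checking that the file exists and loads, assuming the hf:// prefix maps to a repo/filename pair on the Hub; the huggingface_hub and sentencepiece calls are illustrative only and are not the server's own loading code.

# Illustrative check of the tokenizer referenced by text_tokenizer_file.
# Assumption: "hf://kyutai/tts-1.6b-en_fr/tokenizer_spm_8k_en_fr_audio.model"
# corresponds to repo_id="kyutai/tts-1.6b-en_fr",
# filename="tokenizer_spm_8k_en_fr_audio.model" on the Hugging Face Hub.
from huggingface_hub import hf_hub_download
import sentencepiece as spm

# Download (or reuse the cached copy of) the SentencePiece model file.
model_path = hf_hub_download(
    repo_id="kyutai/tts-1.6b-en_fr",
    filename="tokenizer_spm_8k_en_fr_audio.model",
)

# Load it and run a quick encode to confirm the model is valid.
sp = spm.SentencePieceProcessor(model_file=model_path)
print(sp.vocab_size())                      # vocabulary size of the tokenizer
print(sp.encode("bonjour", out_type=int))   # token ids for a sample string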