Lyte committed on
Commit
b121117
·
verified ·
1 Parent(s): 4c8266a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -9,18 +9,18 @@ from outetts.version.v2.prompt_processor import PromptProcessor
9
  from outetts.version.playback import ModelOutput
10
 
11
  model_path = hf_hub_download(
12
- repo_id="Lyte/CiSiMi",
13
- filename="unsloth.Q8_0.gguf",
14
  )
15
 
16
  model_config = outetts.GGUFModelConfig_v2(
17
  model_path=model_path,
18
- tokenizer_path="Lyte/CiSiMi",
19
  )
20
 
21
  interface = outetts.InterfaceGGUF(model_version="0.3", cfg=model_config)
22
  audio_codec = AudioCodec()
23
- prompt_processor = PromptProcessor("Lyte/Qwen-2.5-0.5B-S2S-test")
24
  whisper_model = whisper.load_model("base.en")
25
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
26
  gguf_model = interface.get_model()
 
9
  from outetts.version.playback import ModelOutput
10
 
11
  model_path = hf_hub_download(
12
+ repo_id="KandirResearch/CiSiMi",
13
+ filename="unsloth.Q8_0.gguf", # unsloth.Q4_K_M.gguf
14
  )
15
 
16
  model_config = outetts.GGUFModelConfig_v2(
17
  model_path=model_path,
18
+ tokenizer_path="KandirResearch/CiSiMi",
19
  )
20
 
21
  interface = outetts.InterfaceGGUF(model_version="0.3", cfg=model_config)
22
  audio_codec = AudioCodec()
23
+ prompt_processor = PromptProcessor("KandirResearch/CiSiMi")
24
  whisper_model = whisper.load_model("base.en")
25
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
26
  gguf_model = interface.get_model()