eusip committed (verified)
Commit 3b49f8c · 1 parent: ec78ca1

Update demos/musicgen_app.py

Files changed (1): demos/musicgen_app.py (+16 -15)
demos/musicgen_app.py CHANGED
@@ -203,8 +203,8 @@ def predict_batched(texts, melodies):
 
 
 def predict_full(
-    model,
-    model_path,
+    # model,
+    # model_path,
     decoder,
     text,
     melody,
@@ -219,16 +219,16 @@ def predict_full(
     global USE_DIFFUSION
     INTERRUPTING = False
     progress(0, desc="Loading model...")
-    model_path = model_path.strip()
-    if model_path:
-        if not Path(model_path).exists():
-            raise gr.Error(f"Model path {model_path} doesn't exist.")
-        if not Path(model_path).is_dir():
-            raise gr.Error(
-                f"Model path {model_path} must be a folder containing "
-                "state_dict.bin and compression_state_dict_.bin."
-            )
-        model = model_path
+    # model_path = model_path.strip()
+    # if model_path:
+    #     if not Path(model_path).exists():
+    #         raise gr.Error(f"Model path {model_path} doesn't exist.")
+    #     if not Path(model_path).is_dir():
+    #         raise gr.Error(
+    #             f"Model path {model_path} must be a folder containing "
+    #             "state_dict.bin and compression_state_dict_.bin."
+    #         )
+    #     model = model_path
     if temperature < 0:
         raise gr.Error("Temperature must be >= 0.")
     if topk < 0:
@@ -243,7 +243,8 @@ def predict_full(
         load_diffusion()
     else:
         USE_DIFFUSION = False
-    load_model(model)
+    load_model()
+    # load_model(model)
 
     max_generated = 0
 
@@ -377,8 +378,8 @@ def ui_full(launch_kwargs):
         ).then(
             predict_full,
             inputs=[
-                model,
-                model_path,
+                # model,
+                # model_path,
                 decoder,
                 text,
                 melody,
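
For reference, the checkpoint-folder validation that this commit comments out can be kept around as a standalone helper along the lines of the sketch below. This is a minimal sketch only: validate_model_path is a hypothetical name, and the required folder contents are taken from the error message in the diff.

from pathlib import Path
from typing import Optional

import gradio as gr


def validate_model_path(model_path: str) -> Optional[str]:
    # Hypothetical helper mirroring the checks this commit disables:
    # an empty string means "use the default model"; otherwise the path
    # must be an existing folder holding state_dict.bin and
    # compression_state_dict_.bin.
    model_path = model_path.strip()
    if not model_path:
        return None
    if not Path(model_path).exists():
        raise gr.Error(f"Model path {model_path} doesn't exist.")
    if not Path(model_path).is_dir():
        raise gr.Error(
            f"Model path {model_path} must be a folder containing "
            "state_dict.bin and compression_state_dict_.bin."
        )
    return model_path

With the custom-path branch commented out, predict_full always falls through to load_model() with no argument, so the demo loads its built-in default model instead of a user-supplied checkpoint folder.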