rahul7star committed (verified)
Commit 77adc33 · Parent(s): 0f8f3f1

Update generate.py

Files changed (1): generate.py +6 -3
generate.py CHANGED
@@ -11,7 +11,7 @@ parser.add_argument("--size", type=str, default="832*480")
 parser.add_argument("--frame_num", type=int, default=60)
 parser.add_argument("--sample_steps", type=int, default=20)
 parser.add_argument("--ckpt_dir", type=str, default="./Wan2.1-T2V-1.3B")
-parser.add_argument("--offload_model", type=str, default="True", choices=["True", "False"], help="Whether to offload the model")
+parser.add_argument("--offload_model", type=bool, default=True, help="Whether to offload the model (True/False)")
 parser.add_argument("--t5_cpu", action="store_true", help="Use CPU for T5 model (optional)")
 parser.add_argument("--sample_shift", type=int, default=8, help="Sampling shift for generation")
 parser.add_argument("--sample_guide_scale", type=int, default=6, help="Sampling guide scale for generation")
@@ -27,7 +27,8 @@ print(f"Generating video with the following settings:\n"
       f"Prompt: {args.prompt}\n"
       f"Sample Shift: {args.sample_shift}\n"
       f"Sample Guide Scale: {args.sample_guide_scale}\n"
-      f"Using T5 on CPU: {args.t5_cpu}")
+      f"Using T5 on CPU: {args.t5_cpu}\n"
+      f"Offload Model: {args.offload_model}")
 
 # Ensure the model is downloaded
 if not os.path.exists(args.ckpt_dir):
@@ -41,7 +42,9 @@ if torch.cuda.is_available():
     torch.backends.cudnn.deterministic = True
 
 # Run the model (Ensure that `generate.py` includes these new params in its model call)
-command = f"python generate.py --task {args.task} --size {args.size} --frame_num {args.frame_num} --sample_steps {args.sample_steps} --ckpt_dir {args.ckpt_dir} --offload_model {args.offload_model} --t5_cpu {args.t5_cpu} --sample_shift {args.sample_shift} --sample_guide_scale {args.sample_guide_scale} --prompt \"{args.prompt}\""
+# Pass offload_model correctly as either True or False
+offload_model_value = "True" if args.offload_model else "False"
+command = f"python generate.py --task {args.task} --size {args.size} --frame_num {args.frame_num} --sample_steps {args.sample_steps} --ckpt_dir {args.ckpt_dir} --offload_model {offload_model_value} --t5_cpu {args.t5_cpu} --sample_shift {args.sample_shift} --sample_guide_scale {args.sample_guide_scale} --prompt \"{args.prompt}\""
 
 process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 stdout, stderr = process.communicate()
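
For context, below is a minimal standalone sketch of how the changed pieces behave: the new --offload_model flag is parsed and then re-serialized into the command string, as in the diff above. It assumes only the argparse setup shown in this commit; the comment on bool coercion describes standard argparse behavior and is not something stated in the commit itself.

import argparse
import shlex

# Sketch only: mirrors the flag definition from the diff above.
parser = argparse.ArgumentParser()
parser.add_argument("--offload_model", type=bool, default=True,
                    help="Whether to offload the model (True/False)")
args = parser.parse_args([])  # no CLI flags passed: offload_model stays True

# Note: with type=bool, argparse turns any non-empty string into True,
# so `--offload_model False` would also parse as True; in practice the
# default is what controls the value.

# The commit converts the parsed value to an explicit "True"/"False"
# string before interpolating it into the child-process command line.
offload_model_value = "True" if args.offload_model else "False"
command = f"python generate.py --offload_model {offload_model_value}"
print(shlex.split(command))  # ['python', 'generate.py', '--offload_model', 'True']

Because the full command also embeds the quoted prompt, the script runs it with subprocess.Popen(command, shell=True, ...) and collects output via communicate(); passing the arguments as a list without shell=True is a common alternative that avoids shell quoting issues, but the commit keeps the single-string form.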