Ahmadkhan12 committed on
Commit 67016b2 · verified · 1 Parent(s): e5501e0

Update app.py

Files changed (1)
  1. app.py +45 -64
app.py CHANGED
@@ -2,11 +2,9 @@ import gradio as gr
 import moviepy.editor as mp
 import numpy as np
 from PIL import Image
-import tempfile
 import os

-# Resize image while maintaining aspect ratio
-def resize_image_with_aspect_ratio(img, target_size=(1280, 720), padding_color=(0, 0, 0)):
+def resize_and_fit_image(img, target_size=(1280, 720), padding_color=(0, 0, 0)):
     width, height = img.size
     target_width, target_height = target_size

@@ -21,87 +19,70 @@ def resize_image_with_aspect_ratio(img, target_size=(1280, 720), padding_color=(0, 0, 0)):
         new_width = int(new_height * aspect_ratio)

     img_resized = img.resize((new_width, new_height))
+
+    if new_width > target_width:
+        left = (new_width - target_width) // 2
+        img_resized = img_resized.crop((left, 0, left + target_width, target_height))
+
     final_img = Image.new('RGB', target_size, padding_color)
-    padding_left = (target_width - new_width) // 2
-    padding_top = (target_height - new_height) // 2
-    final_img.paste(img_resized, (padding_left, padding_top))
+    final_img.paste(img_resized, (0, 0))

     return final_img

-# Video generation function with transition and debug logging
-def process_and_generate_video(audio_file, images):
-    debug_log = []
+def apply_zoom_effect(image_clip):
+    zoomed_clip = image_clip.resize(lambda t: 1 + 0.05 * t)  # Zoom in gradually
+    return zoomed_clip

+def process_and_generate_video(audio_file, images):
     try:
-        # Log the files received
-        debug_log.append(f"Audio file received: {audio_file}")
-        debug_log.append(f"Images received: {[img for img in images]}")
-
-        # Process audio file (use audio_file directly as it's a path)
-        audio = mp.AudioFileClip(audio_file)  # Use file path directly
+        audio = mp.AudioFileClip(audio_file)
         audio_duration = audio.duration
         image_clips = []
         image_count = len(images)
         image_duration = audio_duration / image_count
-
-        debug_log.append(f"Audio duration: {audio_duration} seconds, Image count: {image_count}")
-
-        # Process each image and create video clip
-        for img in images:
-            debug_log.append(f"Processing image: {img}")  # Debug print
-            img = Image.open(img)  # Open image from file path
-            img = resize_image_with_aspect_ratio(img, target_size=(1280, 720))
-
-            # Create image clip with a crossfade transition effect
-            img_clip = mp.ImageClip(np.array(img)).set_duration(image_duration).set_fps(24)
-
+
+        print(f"Audio duration: {audio_duration} seconds, Image count: {image_count}")
+
+        for img_path in images:
+            img = Image.open(img_path)
+            img = resize_and_fit_image(img, target_size=(1280, 720))
+
+            img_clip = mp.ImageClip(np.array(img)).set_duration(image_duration).set_fps(30)
+            img_clip = apply_zoom_effect(img_clip)
+
             # Add transition effect - Crossfade In
-            if len(image_clips) > 0:  # Apply transition only after the first image
-                img_clip = img_clip.crossfadein(1)  # 1-second fade-in transition
-
+            if len(image_clips) > 0:
+                img_clip = img_clip.crossfadein(1)
+
             image_clips.append(img_clip)
-
-        debug_log.append(f"Created {len(image_clips)} image clips.")
-
-        # Concatenate image clips with transitions
+
+        print(f"Image clips: {len(image_clips)} clips created.")
+
         video = mp.concatenate_videoclips(image_clips, method="compose")
         video = video.set_audio(audio)
-
-        # Set output file path in a temporary location
-        output_path = '/tmp/generated_video.mp4'  # Temporary path for output
-
-        debug_log.append(f"Writing video to {output_path}...")
-
-        # Write video to file
-        video.write_videofile(output_path, codec='libx264', audio_codec='aac')
-
-        # Check if the video file exists
-        if os.path.exists(output_path):
-            debug_log.append(f"Video generated successfully at {output_path}")
-            return output_path, "\n".join(debug_log)  # Return the video path and debug log
-        else:
-            debug_log.append(f"Error: Video not generated at {output_path}")
-            return "Error: Video not generated.", "\n".join(debug_log)
-
+
+        output_path = '/content/generated_video.mp4'
+        video.write_videofile(output_path, codec='libx264', audio_codec='aac', threads=4, fps=30, preset='ultrafast')
+
+        return output_path  # Return the file path for Gradio output
+
     except Exception as e:
-        debug_log.append(f"Error during video generation: {str(e)}")
-        return f"Error generating video: {str(e)}", "\n".join(debug_log)
+        print(f"Error during video generation: {str(e)}")
+        return f"Error generating video: {str(e)}"

-# Gradio interface setup
 def gradio_interface():
     with gr.Blocks() as demo:
         with gr.Row():
             with gr.Column():
-                mp3_input = gr.Audio(type="filepath", label="Upload MP3")  # MP3 input
-                image_input = gr.File(type="filepath", file_types=[".jpg", ".png"], label="Upload Images", file_count="multiple")  # Images input
-                generate_button = gr.Button("Generate Video")  # Button to generate video
-
-                output_video = gr.Video(label="Generated Video")  # Video output display
-                output_logs = gr.Textbox(label="Debug Logs", interactive=False)  # Display debug logs
-
-                generate_button.click(fn=process_and_generate_video, inputs=[mp3_input, image_input], outputs=[output_video, output_logs])
-
-    demo.launch()  # Launch the Gradio interface
+                mp3_input = gr.Audio(type="filepath", label="Upload MP3")
+                image_input = gr.File(type="filepath", file_types=[".jpg", ".png"], label="Upload Images", file_count="multiple")
+                generate_button = gr.Button("Generate Video")
+
+                output_video = gr.Video(label="Generated Video")
+
+                generate_button.click(fn=process_and_generate_video, inputs=[mp3_input, image_input], outputs=output_video)
+
+    demo.launch()

 # Run the interface
 gradio_interface()
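
The two behavioural changes in this commit are the centre-crop fit in resize_and_fit_image and the gradual zoom in apply_zoom_effect, which relies on MoviePy's time-varying resize effect. Below is a minimal sketch of that zoom in isolation, assuming MoviePy 1.x (the moviepy.editor API that app.py imports) and a hypothetical still image photo.jpg; it illustrates the technique rather than reproducing code from the commit.

import moviepy.editor as mp
import numpy as np
from PIL import Image

# Hypothetical input still; any RGB image works.
img = Image.open("photo.jpg").convert("RGB").resize((1280, 720))

# A 3-second, 30 fps clip built from the still image.
clip = mp.ImageClip(np.array(img)).set_duration(3).set_fps(30)

# Same idea as apply_zoom_effect: the scale factor grows linearly with time,
# from 1.0 at t = 0 to roughly 1.15 at t = 3 s.
zoomed = clip.resize(lambda t: 1 + 0.05 * t)

zoomed.write_videofile("zoom_preview.mp4", codec="libx264")

In process_and_generate_video the same effect is applied to every slide before concatenation, so each image starts at full frame and slowly pushes in for its share of the audio duration.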
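
For a quick check of the pipeline without the Gradio UI, process_and_generate_video can be called directly once the function is in scope (importing app.py as-is would also run gradio_interface() at the bottom of the file and launch the interface). The audio and image paths below are hypothetical placeholders; in the Space, Gradio supplies real file paths for these arguments.

# Assumes process_and_generate_video from the updated app.py is already
# defined in the session; the input paths are hypothetical placeholders.
result = process_and_generate_video(
    "voiceover.mp3",
    ["slide_01.png", "slide_02.png", "slide_03.png"],
)

# Returns the rendered MP4 path on success, or the
# "Error generating video: ..." string from the except branch on failure.
print(result)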