hina19 committed
Commit b361484 · verified · 1 Parent(s): 0906ed2

Update app.py

Files changed (1): app.py (+3 -3)
app.py CHANGED
@@ -8,7 +8,7 @@ from scenedetect import open_video, SceneManager, ContentDetector
 from transformers import BlipProcessor, BlipForConditionalGeneration
 from openai import OpenAI
 import base64
-from moviepy import editor
+import moviepy.editor as mp
 # Load AI models
 caption_processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-base")
 caption_model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base")
@@ -128,9 +128,9 @@ if uploaded_file:
 
 def create_summary_video(image_folder, output_video):
     images = sorted([os.path.join(image_folder, img) for img in os.listdir(image_folder) if img.endswith(".jpg")])
-    clips = [editor.ImageClip(img).set_duration(2) for img in images]  # 2 sec per frame
+    clips = [mp.ImageClip(img).set_duration(2) for img in images]  # 2 sec per frame
 
-    video = editor.concatenate_videoclips(clips, method="compose")
+    video = mp.concatenate_videoclips(clips, method="compose")
     video.write_videofile(output_video, fps=24)
 
 # Example usage
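
For context, here is a minimal, self-contained sketch of the helper as it reads after this commit. It assumes MoviePy 1.x is installed, where the moviepy.editor module and ImageClip.set_duration are available; the folder and output names in the usage comment are placeholders, not values from the app.

# Minimal sketch of the patched helper, assuming MoviePy 1.x
# (moviepy.editor and ImageClip.set_duration come from that API).
import os
import moviepy.editor as mp

def create_summary_video(image_folder, output_video):
    # Collect the extracted key frames in filename order.
    images = sorted(
        os.path.join(image_folder, img)
        for img in os.listdir(image_folder)
        if img.endswith(".jpg")
    )
    # Show each still for 2 seconds.
    clips = [mp.ImageClip(img).set_duration(2) for img in images]
    # method="compose" places frames of differing sizes onto a common canvas.
    video = mp.concatenate_videoclips(clips, method="compose")
    video.write_videofile(output_video, fps=24)

# Hypothetical usage (names are placeholders):
# create_summary_video("scene_frames", "summary.mp4")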