reab5555 committed on
Commit
ba6439c
·
verified ·
1 Parent(s): bb62875

Update processing.py

Browse files
Files changed (1) hide show
  1. processing.py +4 -4
processing.py CHANGED
@@ -136,7 +136,7 @@ def process_input(input_file, progress=None):
136
  except Exception as e:
137
  print(f"Progress update failed: {e}")
138
 
139
- safe_progress(0, desc="Processing file...")
140
 
141
  if isinstance(input_file, str):
142
  file_path = input_file
@@ -155,7 +155,7 @@ def process_input(input_file, progress=None):
155
  content = '\n'.join([page.page_content for page in pages])
156
  transcription = content
157
  elif file_extension in ['.mp4', '.avi', '.mov']:
158
- safe_progress(0.2, desc="Processing video...")
159
  srt_path = process_video(file_path, hf_token, "en")
160
  with open(srt_path, 'r', encoding='utf-8') as file:
161
  content = file.read()
@@ -166,7 +166,7 @@ def process_input(input_file, progress=None):
166
 
167
  detected_language = detect_language(content)
168
 
169
- safe_progress(0.2, desc="Initializing analyzer...")
170
  analyzer = SequentialAnalyzer(hf_token)
171
 
172
  tasks = [
@@ -179,7 +179,7 @@ def process_input(input_file, progress=None):
179
  tokens = []
180
 
181
  for i, (task_name, task, db) in enumerate(tasks):
182
- safe_progress((i + 1) * 0.2, desc=f"Analyzing {task_name}...")
183
  answer, task_tokens = analyzer.analyze_task(content, task, db)
184
  results.append(answer)
185
  tokens.append(task_tokens)
 
136
  except Exception as e:
137
  print(f"Progress update failed: {e}")
138
 
139
+ safe_progress(0, desc="Processing file")
140
 
141
  if isinstance(input_file, str):
142
  file_path = input_file
 
155
  content = '\n'.join([page.page_content for page in pages])
156
  transcription = content
157
  elif file_extension in ['.mp4', '.avi', '.mov']:
158
+ safe_progress(0.2, desc="Processing video")
159
  srt_path = process_video(file_path, hf_token, "en")
160
  with open(srt_path, 'r', encoding='utf-8') as file:
161
  content = file.read()
 
166
 
167
  detected_language = detect_language(content)
168
 
169
+ safe_progress(0.2, desc="Initializing analyzer")
170
  analyzer = SequentialAnalyzer(hf_token)
171
 
172
  tasks = [
 
179
  tokens = []
180
 
181
  for i, (task_name, task, db) in enumerate(tasks):
182
+ safe_progress((i + 1) * 0.2, desc=f"Analyzing {task_name}")
183
  answer, task_tokens = analyzer.analyze_task(content, task, db)
184
  results.append(answer)
185
  tokens.append(task_tokens)