rahul7star committed on
Commit
2b24924
·
verified ·
1 Parent(s): 2bf6a09

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -8
app.py CHANGED
@@ -42,6 +42,26 @@ sys.path.insert(0, "ai-toolkit")
42
  from toolkit.job import get_job
43
 
44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
  app = FastAPI()
46
 
47
  # CORS setup to allow requests from your frontend
@@ -154,11 +174,15 @@ async def upload_images(
154
  background_tasks: BackgroundTasks,
155
  files: List[UploadFile] = File(...)
156
  ):
157
- # Step 1: Generate dynamic folder name
158
  timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
159
  unique_id = uuid.uuid4().hex[:6]
160
  folder_name = f"upload_{timestamp}_{unique_id}"
161
  hf_folder_prefix = f"demo/{folder_name}"
 
 
 
 
162
 
163
  responses = []
164
 
@@ -192,25 +216,45 @@ async def upload_images(
192
 
193
  os.remove(temp_path)
194
 
195
- # Step 3: Add filter job to background
196
- def run_filter():
 
 
 
 
 
197
  try:
198
- result = filter_and_rename_images(folder=hf_folder_prefix)
199
- print(f"🧼 Filter result: {result}")
 
 
 
 
 
 
200
  except Exception as e:
201
- print(f"❌ Filter failed: {str(e)}")
 
202
 
203
- background_tasks.add_task(run_filter)
204
 
205
  return {
206
  "message": f"{len(files)} file(s) uploaded",
 
207
  "upload_folder": hf_folder_prefix,
208
  "results": responses,
209
- "note": "Filtering started in background"
210
  }
211
 
212
 
213
 
 
 
 
 
 
 
 
214
 
215
 
216
 
 
42
  from toolkit.job import get_job
43
 
44
 
45
+ from dataclasses import dataclass, field
46
+ from typing import Dict
47
+ import uuid
48
+ from datetime import datetime
49
+
50
def _utc_now_iso() -> str:
    """Current UTC time as an ISO-8601 string (default for Job.created_at)."""
    return datetime.utcnow().isoformat()


@dataclass
class Job:
    """One queued upload-processing task, tracked in JOB_QUEUE.

    status starts at "pending" and is later overwritten by the background
    worker with "completed" or "failed: <reason>".
    """

    job_id: str        # e.g. "job_ab12cd" (derived from the upload's uuid suffix)
    folder_path: str   # HF folder prefix, e.g. "demo/upload_<timestamp>_<id>"
    status: str = "pending"
    created_at: str = field(default_factory=_utc_now_iso)


# Global job queue (in-memory) keyed by job_id; lost on process restart.
JOB_QUEUE: Dict[str, Job] = {}
59
+
60
+
61
+
62
+
63
+
64
+
65
  app = FastAPI()
66
 
67
  # CORS setup to allow requests from your frontend
 
174
  background_tasks: BackgroundTasks,
175
  files: List[UploadFile] = File(...)
176
  ):
177
+ # Step 1: Generate dynamic folder name and job ID
178
  timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
179
  unique_id = uuid.uuid4().hex[:6]
180
  folder_name = f"upload_{timestamp}_{unique_id}"
181
  hf_folder_prefix = f"demo/{folder_name}"
182
+ job_id = f"job_{unique_id}"
183
+
184
+ # Register job
185
+ JOB_QUEUE[job_id] = Job(job_id=job_id, folder_path=hf_folder_prefix)
186
 
187
  responses = []
188
 
 
216
 
217
  os.remove(temp_path)
218
 
219
+ # Step 3 & 4: Add background task with job ID
220
def process_job(job_id: str):
    """Run the post-upload pipeline for one registered job.

    Looks the job up in the global JOB_QUEUE, runs
    filter_and_rename_images on its folder, then triggers LoRA training
    via auto_run_lora_from_repo. Records the outcome on job.status
    ("completed" or "failed: <reason>"); never raises.
    """
    job = JOB_QUEUE.get(job_id)
    if job is None:
        # Nothing to do — the job was never registered (or was dropped).
        print(f"❌ Job {job_id} not found")
        return

    try:
        print(f"🔧 Starting filter for {job.folder_path}")
        result = filter_and_rename_images(folder=job.folder_path)
        print(f"🧼 Filter result: {result}")

        print(f"🚀 Triggering LoRA for {job.folder_path}")
        auto_run_lora_from_repo(folder_path=job.folder_path)
    except Exception as e:
        # Keep the failure reason on the job record for later inspection.
        job.status = f"failed: {str(e)}"
        print(f"❌ Job {job_id} failed: {str(e)}")
    else:
        job.status = "completed"
238
 
239
+ background_tasks.add_task(process_job, job_id)
240
 
241
  return {
242
  "message": f"{len(files)} file(s) uploaded",
243
+ "job_id": job_id,
244
  "upload_folder": hf_folder_prefix,
245
  "results": responses,
246
+ "note": "Filtering + LoRA training queued"
247
  }
248
 
249
 
250
 
251
def auto_run_lora_from_repo(folder_path: str):
    """Kick off LoRA training for an uploaded repo folder (currently a stub).

    Only logs the trigger for now — the actual training call is still to be
    wired in. Swallows any exception after printing it, so callers never see
    a failure from this function.
    """
    try:
        print(f"🚀 Auto-run triggered for: {folder_path}")
        # Do your training etc...
    except Exception as e:
        print(f"❌ auto_run_lora_from_repo failed: {str(e)}")
258
 
259
 
260