randydev committed on
Commit 5b6294d · verified · 1 Parent(s): 0be4ee6
Files changed (1)
  1. akn/Gemini/gemini.py +32 -21
akn/Gemini/gemini.py CHANGED
@@ -315,34 +315,45 @@ async def chatbot_talk(client: Client, message: Message):
     command = parts[0].lower()
     pic_query = parts[1].strip() if len(parts) > 1 else ""
     new_check_flux_matches = re.findall(r"\bimage\b", query_base)
+    if query_base == "image":
+        return await message.reply_text("i don't have, what do you mean by image?")
     try:
+        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
         if new_check_flux_matches:
-            new_js = await message.reply_text("Please wait, it's still being processed")
-            response_js = await js.image.create(
-                "black-forest-labs/flux-1-schnell",
-                image_read=True,
-                params_data={"query": query_base},
-            )
-            file_path = "randydev.jpg"
-            with open(file_path, "wb") as f:
-                f.write(response_js)
-            await new_js.edit_text("Uploading image...")
-            await message.reply_photo(
-                file_path,
-                progress=progress,
-                progress_args=(
-                    new_js,
-                    time.time(),
-                    "Uploading image..."
+            try:
+                backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
+                await message.reply_text("Please wait, it's still being processed")
+                response_js = await js.image.create(
+                    "black-forest-labs/flux-1-schnell",
+                    image_read=True,
+                    params_data={"query": query_base},
                 )
-            )
-            await new_js.delete()
-            return
+                file_path = "randydev.jpg"
+                with open(file_path, "wb") as f:
+                    f.write(response_js)
+                ok = await message.edit_text("Uploading image...")
+                await message.reply_photo(
+                    file_path,
+                    progress=progress,
+                    progress_args=(
+                        "Please wait, it's still being processed",
+                        time.time(),
+                        "Uploading image..."
+                    )
+                )
+                backup_chat.append({"role": "model", "parts": [{"text": f"IMAGE OUTPUT: {query_base}"}]})
+                await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+                await ok.delete()
+                return
+            except ImageProcessFailed as e:
+                return await message.edit_text("The server failed to process your image")
+            except Exception as e:
+                LOGS.error(f"Error new_check_flux_matches {str(e)}")
+                return await message.edit_text("Try again error image")
 
         model_flash = genai.GenerativeModel(
             model_name="gemini-1.5-flash"
         )
-        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
         backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
         chat_session = model_flash.start_chat(history=backup_chat)
         response_data = chat_session.send_message(query_base)
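For reference, the `model_flash` block at the end of the hunk uses the standard google-generativeai chat-history round-trip, with `backup_chat` persisted via the repo's own `db` helpers. Below is a minimal standalone sketch of that pattern, assuming the same `{"role": ..., "parts": [{"text": ...}]}` history shape; the placeholder API key and the omission of the Mongo-backed `db` calls are assumptions for illustration, not the project's actual wiring.

import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")  # assumption: the real bot loads its key from config

# History shaped exactly like backup_chat in the handler
history = [
    {"role": "user", "parts": [{"text": "hi"}]},
    {"role": "model", "parts": [{"text": "Hello! How can I help?"}]},
]

model = genai.GenerativeModel(model_name="gemini-1.5-flash")
chat = model.start_chat(history=history)        # replay the earlier turns
response = chat.send_message("summarize our chat so far")
print(response.text)                            # chat.history now also holds the two new turns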