LiamKhoaLe commited on
Commit
6d29691
·
1 Parent(s): c14055f

Update logger

Browse files
Files changed (1) hide show
  1. vlm.py +17 -4
vlm.py CHANGED
@@ -1,5 +1,5 @@
1
  # vlm.py
2
- import os, logging
3
  from huggingface_hub import InferenceClient
4
  from translation import translate_query
5
 
@@ -30,9 +30,22 @@ def process_medical_image(base64_image: str, prompt: str = None, lang: str = "EN
30
  ]
31
  }]
32
  )
33
- result = response.choices[0].message.content.strip()
 
 
 
 
 
 
 
 
34
  logger.info(f"[VLM] MedGemma returned {result}")
35
  return result
36
  except Exception as e:
37
- logger.error(f"[VLM] ⚠️ Error from image diagnosis model: {e}")
38
- return f"[VLM] ⚠️ Error from image diagnosis model: {e}"
 
 
 
 
 
 
1
  # vlm.py
2
+ import os, logging, traceback, json
3
  from huggingface_hub import InferenceClient
4
  from translation import translate_query
5
 
 
30
  ]
31
  }]
32
  )
33
+ # Validate response
34
+ if not response or not hasattr(response, "choices") or not response.choices:
35
+ raise ValueError("Empty or malformed response from MedGEMMA.")
36
+ # Get choice resp
37
+ message = response.choices[0].message
38
+ if not message or not hasattr(message, "content"):
39
+ raise ValueError("MedGEMMA response missing `.content`.")
40
+ # Beautify
41
+ result = message.content.strip()
42
  logger.info(f"[VLM] MedGemma returned {result}")
43
  return result
44
  except Exception as e:
45
+ logger.error(f"[VLM] Exception: {e}")
46
+ logger.error(f"[VLM] 🔍 Traceback:\n{traceback.format_exc()}")
47
+ try:
48
+ logger.error(f"[VLM] ⚠️ Raw response: {json.dumps(response, default=str, indent=2)}")
49
+ except:
50
+ logger.warning("[VLM] ⚠️ Response not serializable.")
51
+ return f"[VLM] ⚠️ Image diagnosis failed: {str(e)}"