Update routers/analyze.py
routers/analyze.py  CHANGED  (+12 -16)
@@ -135,11 +135,13 @@ async def rewrite_article_direct(content: str) -> Optional[Dict[str, Any]]:
             "title": result.title,
             "subhead": result.subhead,
             "content": result.content,
-            "
+            "title_instagram": getattr(result, "title_instagram", ""),
+            "content_instagram": getattr(result, "content_instagram", "")
         }
 
         # Validação básica da resposta
-
+        required_keys = ["title", "subhead", "content", "title_instagram", "content_instagram"]
+        if all(key in rewritten_data and rewritten_data[key].strip() for key in required_keys):
             log.info("Artigo reescrito com sucesso (chamada direta)")
             return {
                 "success": True,
@@ -157,7 +159,7 @@ async def rewrite_article_direct(content: str) -> Optional[Dict[str, Any]]:
                 "raw_response": str(rewritten_data),
                 "status_code": 200,
                 "method": "direct_call",
-                "missing_keys": [key for key in
+                "missing_keys": [key for key in required_keys if not rewritten_data.get(key, "").strip()]
             }
 
     except Exception as e:
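Taken together, the two hunks above change rewrite_article_direct so that the rewritten payload carries the two new Instagram fields (defaulting to empty strings via getattr) and success is only reported when every required key is present and non-blank; otherwise missing_keys names the offending fields. A minimal standalone sketch of that check follows; validate_rewrite is a hypothetical helper, not part of routers/analyze.py:

from typing import Dict, List, Tuple

# Hypothetical helper mirroring the new direct-call validation; not part of routers/analyze.py.
REQUIRED_KEYS = ["title", "subhead", "content", "title_instagram", "content_instagram"]

def validate_rewrite(rewritten_data: Dict[str, str]) -> Tuple[bool, List[str]]:
    """Every required key must exist and contain non-whitespace text."""
    missing = [key for key in REQUIRED_KEYS if not rewritten_data.get(key, "").strip()]
    return (len(missing) == 0, missing)

# Example: a response without the new Instagram fields fails validation.
ok, missing = validate_rewrite({"title": "T", "subhead": "S", "content": "C"})
print(ok, missing)  # False ['title_instagram', 'content_instagram']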
@@ -197,7 +199,8 @@ async def rewrite_article_http(content: str) -> Optional[Dict[str, Any]]:
         rewritten_data = json.loads(response_text)
 
         # Validação básica da resposta
-
+        required_keys = ["title", "subhead", "content", "title_instagram", "content_instagram"]
+        if all(key in rewritten_data for key in required_keys):
             log.info("Artigo reescrito com sucesso (HTTP)")
             return {
                 "success": True,
@@ -215,7 +218,7 @@ async def rewrite_article_http(content: str) -> Optional[Dict[str, Any]]:
                 "raw_response": response_text,
                 "status_code": response.status,
                 "method": "http_call",
-                "missing_keys": [key for key in required_keys if key not in rewritten_data]
+                "missing_keys": [key for key in required_keys if key not in rewritten_data]
             }
 
     except json.JSONDecodeError as e:
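Worth noting: the HTTP path validates more loosely than the direct path, since it only checks that each key exists in the parsed JSON. A field that is present but empty still counts as success there, and missing_keys lists only absent keys. A small illustrative snippet, using assumed example data rather than code from the repository:

required_keys = ["title", "subhead", "content", "title_instagram", "content_instagram"]

# Assumed example data, not from the app: every key present but empty.
rewritten_data = {key: "" for key in required_keys}

http_ok = all(key in rewritten_data for key in required_keys)          # True (presence only)
direct_ok = all(key in rewritten_data and rewritten_data[key].strip()
                for key in required_keys)                              # False (non-blank required)
missing_keys = [key for key in required_keys if key not in rewritten_data]  # []
print(http_ok, direct_ok, missing_keys)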
@@ -294,25 +297,18 @@ async def fetch_brazil_interest_news():
         raise HTTPException(status_code=500, detail=f"Erro Supabase: {str(e)}")
 
 async def update_news_rewrite(news_id: int, rewritten_data: Dict[str, str]):
-    """Atualiza a notícia com os dados reescritos
+    """Atualiza a notícia com os dados reescritos incluindo campos do Instagram"""
     try:
         session = await get_http_session()
         url = f"{SUPABASE_URL}/rest/v1/news"
         params = {"id": f"eq.{news_id}"}
 
-        # Conta as fontes - se vazio ou None, usa 1 como padrão
-        sources_list = rewritten_data.get("sources", [])
-        sources_count = len(sources_list) if sources_list else 1
-
-        # Log para debug
-        log.info(f"Fontes encontradas: {sources_list}")
-        log.info(f"Contagem de fontes: {sources_count}")
-
         payload = {
             "title_pt": rewritten_data.get("title", ""),
             "text_pt": rewritten_data.get("content", ""),
             "subhead_pt": rewritten_data.get("subhead", ""),
-            "
+            "title_instagram": rewritten_data.get("title_instagram", ""),
+            "content_instagram": rewritten_data.get("content_instagram", "")
         }
 
         async with session.patch(url, headers=SUPABASE_ROLE_HEADERS, json=payload, params=params) as response:
@@ -321,7 +317,7 @@ async def update_news_rewrite(news_id: int, rewritten_data: Dict[str, str]):
                 log.error(f"Erro ao atualizar notícia - Status: {response.status}, Response: {response_text}")
                 raise HTTPException(status_code=500, detail=f"Erro ao atualizar notícia - Status: {response.status}")
 
-            log.info(f"Notícia {news_id} atualizada com sucesso - Status: {response.status}
+            log.info(f"Notícia {news_id} atualizada com sucesso - Status: {response.status}")
 
     except Exception as e:
         log.error(f"Erro ao atualizar notícia {news_id}: {str(e)}")
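For reference, update_news_rewrite now PATCHes two extra columns onto the matching news row through Supabase's REST endpoint, which assumes title_instagram and content_instagram columns already exist on the news table. Below is a self-contained aiohttp sketch of an equivalent request; the URL and headers are placeholders rather than the app's real SUPABASE_URL and SUPABASE_ROLE_HEADERS, and patch_news is a hypothetical standalone helper (the app itself reuses a shared session from get_http_session()):

from typing import Dict

import aiohttp

SUPABASE_URL = "https://YOUR-PROJECT.supabase.co"   # placeholder, not the app's value
SUPABASE_ROLE_HEADERS = {                            # placeholder service-role headers
    "apikey": "YOUR-SERVICE-ROLE-KEY",
    "Authorization": "Bearer YOUR-SERVICE-ROLE-KEY",
    "Content-Type": "application/json",
}

async def patch_news(news_id: int, rewritten_data: Dict[str, str]) -> int:
    """Hypothetical helper: PATCH the rewritten fields, including the Instagram columns, onto one news row."""
    url = f"{SUPABASE_URL}/rest/v1/news"
    params = {"id": f"eq.{news_id}"}
    payload = {
        "title_pt": rewritten_data.get("title", ""),
        "text_pt": rewritten_data.get("content", ""),
        "subhead_pt": rewritten_data.get("subhead", ""),
        "title_instagram": rewritten_data.get("title_instagram", ""),
        "content_instagram": rewritten_data.get("content_instagram", ""),
    }
    async with aiohttp.ClientSession() as session:
        async with session.patch(url, headers=SUPABASE_ROLE_HEADERS, json=payload, params=params) as response:
            return response.status

# Run with: asyncio.run(patch_news(123, {"title": "...", "subhead": "...", "content": "..."}))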