habulaj committed on
Commit 9b1d7c0 · verified · 1 Parent(s): 7bdb090

Create logs.py

Files changed (1)
  1. routes/logs.py +79 -52
routes/logs.py CHANGED
@@ -1,67 +1,94 @@
- import os
  import logging
  import aiohttp
- from fastapi import APIRouter, HTTPException
  from typing import List, Dict, Any

  router = APIRouter()

- # Supabase configs
- SUPABASE_URL = "https://ussxqnifefkgkaumjann.supabase.co"
- SUPABASE_KEY = os.getenv("SUPA_KEY")
- SUPABASE_ROLE_KEY = os.getenv("SUPA_SERVICE_KEY")  # <- required to access logs

- if not SUPABASE_KEY or not SUPABASE_ROLE_KEY:
-     raise ValueError("❌ SUPA_KEY or SUPA_SERVICE_KEY is not set in the environment!")

- # Correct headers for the LOGS API (the role key is required!)
- SUPABASE_ROLE_HEADERS = {
-     "apikey": SUPABASE_ROLE_KEY,
-     "Authorization": f"Bearer {SUPABASE_ROLE_KEY}",
-     "Content-Type": "application/json"
- }

- # Internal logging
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger(__name__)

- @router.get("/logs", response_model=List[Dict[str, Any]])
- async def get_onboarding_logs():
-     """
-     Returns the last 10 modification logs for the Onboarding table.
-     """
-     project_ref = SUPABASE_URL.split("//")[1].split(".")[0]
-     log_api_url = f"https://api.supabase.com/v1/projects/{project_ref}/logs"

-     params = {
-         "sql": """
-             select
-                 cast(timestamp as text) as timestamp,
-                 m.method,
-                 m.status_code,
-                 r.url
-             from edge_logs
-             cross join unnest(metadata) as m
-             cross join unnest(m.request) as r
-             where
-                 path like '%rest/v1/Onboarding%'
-                 and m.method in ('POST', 'PATCH', 'PUT', 'DELETE')
-             order by timestamp desc
-             limit 10
-         """
-     }

-     async with aiohttp.ClientSession() as session:
-         try:
-             async with session.get(log_api_url, headers=SUPABASE_ROLE_HEADERS, params=params) as response:
-                 if response.status != 200:
-                     error_body = await response.text()
-                     logger.error(f"Error querying logs: {error_body}")
-                     raise HTTPException(status_code=response.status, detail=error_body)

-                 return await response.json()

-         except Exception as e:
-             logger.exception("Unexpected error while querying logs")
-             raise HTTPException(status_code=500, detail=str(e))

  import logging
  import aiohttp
+ from fastapi import APIRouter, HTTPException, Query, Header
  from typing import List, Dict, Any

+ from . import SUPABASE_URL, SUPABASE_HEADERS, verify_token_with_permissions
+
  router = APIRouter()
+ logger = logging.getLogger(__name__)

+ @router.get("/logs")
+ async def get_logs(
+     page: int = Query(0, ge=0),
+     user_token: str = Header(..., alias="User-key")
+ ):
+     """
+     Returns a paginated list of logs with user information.
+     Requires admin permission.
+     Each page contains at most 50 logs.
+     """
+     try:
+         # Check for administrator permission
+         await verify_token_with_permissions(user_token)

+         limit = 50
+         offset = page * limit

+         query_url = f"{SUPABASE_URL}/rest/v1/Logs?order=created_at.desc&limit={limit}&offset={offset}&select=id,user,action,reference,old_data,new_data,created_at"

+         async with aiohttp.ClientSession() as session:
+             async with session.get(query_url, headers=SUPABASE_HEADERS) as response:
+                 if response.status != 200:
+                     detail = await response.text()
+                     logger.error(f"❌ Error fetching logs: {detail}")
+                     raise HTTPException(status_code=response.status, detail="Error fetching logs")

+                 logs = await response.json()

+         # If there are no records, return an empty list
+         if not logs:
+             return {
+                 "logs": [],
+                 "page": page,
+                 "has_next": False
+             }

+         # Collect all distinct user ids
+         user_ids = list({log["user"] for log in logs if log.get("user")})
+         if not user_ids:
+             user_info_map = {}
+         else:
+             user_ids_query = ",".join(f'"{uid}"' for uid in user_ids)
+             users_url = f"{SUPABASE_URL}/rest/v1/User?id=in.({user_ids_query})&select=id,name,avatar"

+             async with aiohttp.ClientSession() as session:
+                 async with session.get(users_url, headers=SUPABASE_HEADERS) as response:
+                     if response.status != 200:
+                         logger.warning("⚠️ Error fetching user data")
+                         user_info_map = {}
+                     else:
+                         users_data = await response.json()
+                         user_info_map = {user["id"]: {"name": user["name"], "avatar": user["avatar"]} for user in users_data}
+
+         # Build the response enriched with user info
+         enriched_logs = []
+         for log in logs:
+             user_data = user_info_map.get(log["user"], {"name": None, "avatar": None})
+             enriched_logs.append({
+                 "id": log["id"],
+                 "user": {
+                     "id": log["user"],
+                     "name": user_data["name"],
+                     "avatar": user_data["avatar"]
+                 },
+                 "action": log["action"],
+                 "reference": log["reference"],
+                 "old_data": log["old_data"],
+                 "new_data": log["new_data"],
+                 "created_at": log["created_at"]
+             })
+
+         has_next = len(logs) == limit

+         return {
+             "logs": enriched_logs,
+             "page": page,
+             "has_next": has_next
+         }

+     except HTTPException as he:
+         raise he
+     except Exception as e:
+         logger.error(f"❌ Internal error while fetching logs: {str(e)}")
+         raise HTTPException(status_code=500, detail="Internal server error")
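
For reference, a minimal client-side sketch of how the new paginated endpoint can be called. It assumes the router is mounted at the application root and that a valid admin token is sent in the User-key header, as defined above; BASE_URL and ADMIN_TOKEN are placeholders and not part of this commit.

import asyncio
import aiohttp

BASE_URL = "http://localhost:8000"   # placeholder, adjust for the deployment
ADMIN_TOKEN = "<admin-user-token>"   # placeholder, validated server-side by verify_token_with_permissions

async def fetch_logs(page: int = 0):
    # page is 0-based; the server returns at most 50 logs per page
    headers = {"User-key": ADMIN_TOKEN}
    params = {"page": str(page)}
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{BASE_URL}/logs", headers=headers, params=params) as resp:
            resp.raise_for_status()
            return await resp.json()  # {"logs": [...], "page": ..., "has_next": ...}

if __name__ == "__main__":
    data = asyncio.run(fetch_logs(page=0))
    print(len(data["logs"]), data["has_next"])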