import os
import logging
from datetime import datetime
from typing import Any, Dict, List

import pytz
import requests
import stripe
from dateutil.relativedelta import relativedelta
from fastapi import APIRouter, Header, HTTPException, Query
from pydantic import BaseModel

router = APIRouter()


stripe.api_key = os.getenv("STRIPE_KEY")
stripe.api_version = "2023-10-16"

SUPABASE_URL = "https://ussxqnifefkgkaumjann.supabase.co"
SUPABASE_KEY = os.getenv("SUPA_KEY")

if not stripe.api_key or not SUPABASE_KEY:
    raise ValueError("❌ STRIPE_KEY or SUPA_KEY is not set in the environment!")

SUPABASE_HEADERS = {
    "apikey": SUPABASE_KEY,
    "Authorization": f"Bearer {SUPABASE_KEY}",
    "Content-Type": "application/json",
}

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class UserIDRequest(BaseModel):
    user_id: str


def verify_token(user_token: str) -> str:
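    """Resolve the Supabase user ID from a user access token.

    Calls the Supabase Auth `/auth/v1/user` endpoint with the provided token and
    returns the authenticated user's ID, raising an HTTPException on failure.
    """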
    headers = {
        "Authorization": f"Bearer {user_token}",
        "apikey": SUPABASE_KEY,
        "Content-Type": "application/json",
    }
    response = requests.get(f"{SUPABASE_URL}/auth/v1/user", headers=headers)
    if response.status_code == 200:
        user_data = response.json()
        user_id = user_data.get("id")
        if not user_id:
            raise HTTPException(status_code=400, detail="Invalid token: User ID not found")
        return user_id
    else:
        raise HTTPException(status_code=401, detail="Invalid or expired token")


def get_account_balance(account_id: str) -> Dict[str, Any]:
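    """Return the available and pending BRL balances for a connected Stripe account."""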
    try:
        balance = stripe.Balance.retrieve(stripe_account=account_id)
        available_balance = next((b.amount for b in balance.available if b.currency.upper() == "BRL"), 0)
        pending_balance = next((b.amount for b in balance.pending if b.currency.upper() == "BRL"), 0)
        return {
            "available_balance": available_balance,
            "pending_balance": pending_balance,
            "currency": "BRL",
        }
    except Exception as e:
        logger.error(f"❌ Error getting account balance: {str(e)}")
        return {"available_balance": 0, "pending_balance": 0, "currency": "BRL"}


def get_payout_history(account_id: str, limit: int = 10) -> List[Dict[str, Any]]:
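    """List the most recent payouts for a connected account, including bank details when available."""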
    try:
        payouts = stripe.Payout.list(
            stripe_account=account_id,
            limit=limit,
            expand=["data.destination"],
        )

        payout_history = []
        for payout in payouts.data:
            arrival_date = datetime.fromtimestamp(payout.arrival_date) if payout.arrival_date else None

            payout_entry = {
                "id": payout.id,
                "amount": payout.amount,
                "currency": payout.currency,
                "status": payout.status,
                "type": payout.type,
                "method": payout.method,
                "created": datetime.fromtimestamp(payout.created).isoformat(),
                "arrival_date": arrival_date.isoformat() if arrival_date else None,
                "description": payout.description,
                "failure_code": payout.failure_code,
                "failure_message": payout.failure_message,
            }

            # The destination is expanded above, so it carries the bank account details.
            if hasattr(payout, 'destination') and payout.destination:
                bank = payout.destination
                payout_entry["bank_details"] = {
                    "bank_name": getattr(bank, 'bank_name', None),
                    "last4": getattr(bank, 'last4', None),
                    "account_holder_name": getattr(bank, 'account_holder_name', None),
                }

            payout_history.append(payout_entry)

        return payout_history
    except Exception as e:
        logger.error(f"❌ Error getting payout history: {str(e)}")
        return []


def get_connected_account_details(account_id: str) -> Dict[str, Any]:
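    """Fetch account status, payout schedule and primary bank account for a connected Stripe account."""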
    try:
        account = stripe.Account.retrieve(account_id)
        external_accounts = stripe.Account.list_external_accounts(
            account_id,
            object="bank_account",
            limit=1,
        )

        payout_schedule = account.settings.payouts.schedule

        bank_account = {}
        if external_accounts and external_accounts.data:
            bank = external_accounts.data[0]
            bank_account = {
                "account_holder_name": bank.account_holder_name,
                "bank_name": bank.bank_name,
                "last4": bank.last4,
                "routing_number": bank.routing_number,
                "status": bank.status,
            }

        return {
            "id": account.id,
            "business_type": account.business_type,
            "company": {
                "name": account.business_profile.name if hasattr(account, 'business_profile') and hasattr(account.business_profile, 'name') else None,
            },
            "email": account.email,
            "payouts_enabled": account.payouts_enabled,
            "charges_enabled": account.charges_enabled,
            "payout_schedule": {
                "interval": payout_schedule.interval,
                "monthly_anchor": payout_schedule.monthly_anchor if hasattr(payout_schedule, 'monthly_anchor') else None,
                "weekly_anchor": payout_schedule.weekly_anchor if hasattr(payout_schedule, 'weekly_anchor') else None,
                "delay_days": payout_schedule.delay_days,
            },
            "bank_account": bank_account,
            # Requirements still due for verification; an empty list means nothing is outstanding.
            "verification_status": account.requirements.currently_due or [],
        }
    except Exception as e:
        logger.error(f"❌ Error getting connected account details: {str(e)}")
        return {
            "id": account_id,
            "payouts_enabled": False,
            "charges_enabled": False,
            "error": str(e),
        }


def get_monthly_revenue(account_id: str) -> Dict[str, Any]:
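    """Summarize transfers to the connected account per month for the last 6 months.

    Amounts stay in Stripe's smallest currency unit; month-over-month growth is computed
    against the previous month in the series.
    """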
    ny_timezone = pytz.timezone('America/New_York')
    now_ny = datetime.now(ny_timezone)
    monthly_data = {}
    total_revenue_last_6_months = 0

    # Pre-build one bucket per month for the current month and the 5 before it.
    for i in range(6):
        target_date = now_ny - relativedelta(months=i)
        month_num = target_date.month
        month_name = target_date.strftime('%b')
        year = target_date.year
        month_key = f"{year}-{month_num}"
        monthly_data[month_key] = {
            "month": month_num,
            "name": month_name,
            "current": (month_num == now_ny.month and year == now_ny.year),
            "amount": 0,
            "growth": {"status": "", "percentage": 0, "formatted": "0.0%"},
        }

    # Only the last 6 months are bucketed, so limit the Stripe query to that window.
    start_date = now_ny - relativedelta(months=6)
    start_timestamp = int(start_date.timestamp())

    try:
        transfers = stripe.Transfer.list(
            destination=account_id,
            created={"gte": start_timestamp},
            limit=100,
        )
        for transfer in transfers.data:
            transfer_date = datetime.fromtimestamp(transfer.created, ny_timezone)
            month_key = f"{transfer_date.year}-{transfer_date.month}"
            if month_key in monthly_data:
                monthly_data[month_key]["amount"] += transfer.amount
                total_revenue_last_6_months += transfer.amount

        result = list(monthly_data.values())

        # Sort newest-first: the current month gets key 0, last month 1, and so on.
        result.sort(key=lambda x: (now_ny.year * 12 + now_ny.month - (x["month"])) % 12)

        # Compute month-over-month growth; the entry at i + 1 is the previous month.
        for i in range(len(result)):
            current_month_data = result[i]

            prev_index = i + 1 if i + 1 < len(result) else None
            if prev_index is not None:
                previous_amount = result[prev_index]["amount"]
            else:
                previous_amount = 0

            current_amount = current_month_data["amount"]

            if previous_amount > 0:
                growth_percentage = ((current_amount - previous_amount) / previous_amount) * 100
            else:
                growth_percentage = 100 if current_amount > 0 else 0

            if growth_percentage > 0:
                status = "up"
            elif growth_percentage < 0:
                status = "down"
            else:
                status = "neutral"

            current_month_data["growth"] = {
                "status": status,
                "percentage": round(growth_percentage, 1),
                "formatted": f"{round(growth_percentage, 1)}%",
            }

        return {
            "monthly_data": result,
            "total_last_6_months": total_revenue_last_6_months,
        }
    except Exception as e:
        logger.error(f"❌ Error getting monthly revenue: {str(e)}")
        return {
            "monthly_data": list(monthly_data.values()),
            "total_last_6_months": 0,
        }


def format_subscription_date(created_at_str: str) -> str:
    """Format the subscription date as e.g. '13th January 2025'."""
    try:
        created_at = datetime.fromisoformat(created_at_str.replace('Z', '+00:00'))

        # Pick the ordinal suffix; the first range also covers the 11th-13th special cases.
        day = created_at.day
        if 4 <= day <= 20 or 24 <= day <= 30:
            suffix = "th"
        else:
            suffix = {1: "st", 2: "nd", 3: "rd"}.get(day % 10, "th")

        return f"{day}{suffix} {created_at.strftime('%B %Y')}"
    except Exception as e:
        logger.error(f"❌ Error formatting subscription date: {str(e)}")
        return "Unknown date"


def get_active_subscribers(user_id: str, page: int) -> Dict[str, Any]:
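    """Return one page (3 entries) of a stylist's active subscribers, newest first."""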
    limit = 3
    offset = page * limit

    url = f"{SUPABASE_URL}/rest/v1/Subscriptions?stylist_id=eq.{user_id}&active=eq.true&order=created_at.desc&limit={limit}&offset={offset}"

    response = requests.get(url, headers=SUPABASE_HEADERS)
    if response.status_code == 200:
        subscribers = response.json()
        subscriber_list = []
        for sub in subscribers:
            # One extra request per subscriber to resolve the customer's profile.
            customer_id = sub.get("customer_id")
            user_data_url = f"{SUPABASE_URL}/rest/v1/User?id=eq.{customer_id}"
            user_response = requests.get(user_data_url, headers=SUPABASE_HEADERS)
            if user_response.status_code == 200 and user_response.json():
                user_info = user_response.json()[0]
                subscription_date = format_subscription_date(sub.get("created_at", ""))
                subscriber_list.append({
                    "id": user_info.get("id"),
                    "name": user_info.get("name"),
                    "avatar": user_info.get("avatar"),
                    "blurhash": user_info.get("blurhash"),
                    "subscription_date": subscription_date,
                })

        # If the page came back full, assume there may be another page.
        has_next_page = len(subscribers) == limit
        return {"subscribers": subscriber_list, "has_next_page": has_next_page}

    return {"subscribers": [], "has_next_page": False}


def get_total_followers(user_id: str) -> int:
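    """Count how many users follow the given user."""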
    url = f"{SUPABASE_URL}/rest/v1/followers?following_id=eq.{user_id}"
    response = requests.get(url, headers=SUPABASE_HEADERS)
    if response.status_code == 200:
        followers = response.json()
        return len(followers)
    return 0


def get_total_subscribers(user_id: str) -> int:
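    """Count the active subscriptions for a stylist."""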
    url = f"{SUPABASE_URL}/rest/v1/Subscriptions?stylist_id=eq.{user_id}&active=eq.true"
    response = requests.get(url, headers=SUPABASE_HEADERS)
    if response.status_code == 200:
        subscribers = response.json()
        return len(subscribers)
    return 0


def get_courtesy_consultations(user_id: str) -> Dict[str, Any]:
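    """Count courtesy consultations per month for the last 6 months."""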
    ny_timezone = pytz.timezone('America/New_York')
    now_ny = datetime.now(ny_timezone)
    monthly_data = {}

    # Pre-build one bucket per month for the current month and the 5 before it.
    for i in range(6):
        target_date = now_ny - relativedelta(months=i)
        month_num = target_date.month
        month_name = target_date.strftime('%b')
        year = target_date.year
        month_key = f"{year}-{month_num}"
        monthly_data[month_key] = {
            "month": month_num,
            "name": month_name,
            "current": (month_num == now_ny.month and year == now_ny.year),
            "courtesy_count": 0,
        }

    start_date = (now_ny - relativedelta(months=6)).strftime('%Y-%m-%d')

    try:
        url = f"{SUPABASE_URL}/rest/v1/schedules?stylist_id=eq.{user_id}&courtesy=eq.true&date=gte.{start_date}"
        response = requests.get(url, headers=SUPABASE_HEADERS)

        if response.status_code == 200:
            schedules = response.json()

            for schedule in schedules:
                schedule_date_str = schedule.get("date")
                if not schedule_date_str:
                    continue
                schedule_date = datetime.fromisoformat(schedule_date_str.replace('Z', '+00:00')).astimezone(ny_timezone)

                month_key = f"{schedule_date.year}-{schedule_date.month}"
                if month_key in monthly_data:
                    monthly_data[month_key]["courtesy_count"] += 1

        result = list(monthly_data.values())
        # Sort newest-first (same keying as get_monthly_revenue).
        result.sort(key=lambda x: (now_ny.year * 12 + now_ny.month - (x["month"])) % 12)

        return {
            "monthly_courtesy_consultations": result
        }
    except Exception as e:
        logger.error(f"❌ Error getting courtesy consultations: {str(e)}")
        return {
            "monthly_courtesy_consultations": list(monthly_data.values())
        }


def parse_datetime_safely(date_str: str) -> datetime:
    """Parse a datetime string safely, handling microsecond overflow and short timezone offsets."""
    try:
        original_date_str = date_str

        # Normalize a trailing 'Z' to an explicit UTC offset.
        if date_str.endswith('Z'):
            date_str = date_str.replace('Z', '+00:00')

        # Expand short offsets like '+00' to '+00:00' so fromisoformat accepts them.
        import re
        tz_pattern = r'([+-])(\d{2})$'
        match = re.search(tz_pattern, date_str)
        if match:
            sign, hours = match.groups()
            date_str = re.sub(tz_pattern, f'{sign}{hours}:00', date_str)

        # Truncate fractional seconds to at most 6 digits (the limit for fromisoformat).
        if '.' in date_str:
            parts = date_str.split('.')
            if len(parts) == 2:
                base_part = parts[0]
                microsec_and_tz = parts[1]

                # Find where the timezone offset starts, if any.
                tz_start_idx = -1
                for i in range(len(microsec_and_tz)):
                    if microsec_and_tz[i] in ['+', '-']:
                        tz_start_idx = i
                        break

                if tz_start_idx > 0:
                    microsec_part = microsec_and_tz[:tz_start_idx]
                    tz_part = microsec_and_tz[tz_start_idx:]

                    if len(microsec_part) > 6:
                        microsec_part = microsec_part[:6]

                    date_str = f"{base_part}.{microsec_part}{tz_part}"
                else:
                    if len(microsec_and_tz) > 6:
                        microsec_and_tz = microsec_and_tz[:6]
                    date_str = f"{base_part}.{microsec_and_tz}"

        parsed_date = datetime.fromisoformat(date_str)
        logger.debug(f"✅ Successfully parsed date '{original_date_str}' -> '{date_str}' -> {parsed_date}")
        return parsed_date

    except ValueError as e:
        logger.warning(f"⚠️ Error parsing date '{original_date_str}': {str(e)}")
        try:
            # Fallback: drop fractional seconds and patch up common offset issues.
            simple_date = original_date_str.split('.')[0]
            if not any(tz in simple_date for tz in ['+', '-', 'Z']):
                simple_date += '-04:00'
            elif simple_date.endswith('-04'):
                simple_date += ':00'
            return datetime.fromisoformat(simple_date)
        except Exception:
            logger.error(f"❌ Failed to parse date '{original_date_str}' with fallback method")
            return datetime.now(pytz.UTC)


def get_monthly_likes(user_id: str) -> Dict[str, Any]:
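    """Count likes on the user's feed items per month for the last 6 months."""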
    ny_timezone = pytz.timezone('America/New_York')
    now_ny = datetime.now(ny_timezone)
    monthly_data = {}
    total_likes_last_6_months = 0

    # Pre-build one bucket per month for the current month and the 5 before it.
    for i in range(6):
        target_date = now_ny - relativedelta(months=i)
        month_num = target_date.month
        month_name = target_date.strftime('%b')
        year = target_date.year
        month_key = f"{year}-{month_num}"
        monthly_data[month_key] = {
            "month": month_num,
            "name": month_name,
            "current": (month_num == now_ny.month and year == now_ny.year),
            "likes_count": 0,
        }

    start_date = (now_ny - relativedelta(months=6)).strftime('%Y-%m-%d')

    try:
        # Fetch the user's feed items, then the likes on those items within the window.
        feeds_url = f"{SUPABASE_URL}/rest/v1/Feeds?user_id=eq.{user_id}"
        feeds_response = requests.get(feeds_url, headers=SUPABASE_HEADERS)

        if feeds_response.status_code == 200:
            feeds = feeds_response.json()
            feed_ids = [feed.get("id") for feed in feeds]

            if feed_ids:
                feed_ids_str = ','.join([str(feed_id) for feed_id in feed_ids])

                likes_url = f"{SUPABASE_URL}/rest/v1/likes?feed_item_id=in.({feed_ids_str})&created_at=gte.{start_date}"
                likes_response = requests.get(likes_url, headers=SUPABASE_HEADERS)

                if likes_response.status_code == 200:
                    likes = likes_response.json()
                    logger.info(f"📊 Found {len(likes)} likes for user {user_id}")

                    for like in likes:
                        like_date_str = like.get("created_at")
                        if like_date_str:
                            like_date = parse_datetime_safely(like_date_str).astimezone(ny_timezone)

                            month_key = f"{like_date.year}-{like_date.month}"
                            logger.debug(f"📅 Like date: {like_date_str} -> {like_date} -> month_key: {month_key}")

                            if month_key in monthly_data:
                                monthly_data[month_key]["likes_count"] += 1
                                total_likes_last_6_months += 1
                                logger.debug(f"✅ Added like to {month_key}, total now: {monthly_data[month_key]['likes_count']}")
                            else:
                                logger.debug(f"⚠️ Month key {month_key} not in range, skipping like from {like_date}")
                else:
                    logger.warning(f"⚠️ Failed to fetch likes: {likes_response.status_code}")
            else:
                logger.info(f"📭 No feed items found for user {user_id}")
        else:
            logger.warning(f"⚠️ Failed to fetch feeds: {feeds_response.status_code}")

        result = list(monthly_data.values())
        # Sort newest-first (same keying as get_monthly_revenue).
        result.sort(key=lambda x: (now_ny.year * 12 + now_ny.month - (x["month"])) % 12)

        return {
            "monthly_likes": result,
            "total_likes_last_6_months": total_likes_last_6_months,
        }
    except Exception as e:
        logger.error(f"❌ Error getting monthly likes: {str(e)}")
        return {
            "monthly_likes": list(monthly_data.values()),
            "total_likes_last_6_months": 0,
        }


@router.post("/bank_account_dashboard_link")
async def generate_bank_account_dashboard_link(data: UserIDRequest):
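    """Create a Stripe dashboard login link for the stylist's connected account."""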
    try:
        user_id = data.user_id

        if not user_id.startswith("acct_"):
            raise HTTPException(status_code=400, detail="Invalid user ID format. Must be a Stripe connected account (acct_).")

        try:
            login_link = stripe.Account.create_login_link(user_id)

            return {
                "status": "success",
                "dashboard_link": login_link.url,
            }

        except stripe.error.StripeError as e:
            logger.error(f"❌ Error creating login link: {str(e)}")
            raise HTTPException(status_code=500, detail="Error creating login link for stylist account.")

    except HTTPException:
        # Let intentional HTTP errors raised above propagate with their original status.
        raise
    except Exception as e:
        logger.error(f"❌ Error generating bank account dashboard link: {str(e)}")
        raise HTTPException(status_code=500, detail="Error generating bank account dashboard link.")


@router.get("/dashboard")
def get_dashboard(user_token: str = Header(None, alias="User-key"), page: int = Query(0, ge=0)):
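    """Aggregate the stylist dashboard: Stripe balance, revenue, payouts, likes, followers and subscribers."""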
    try:
        user_id = verify_token(user_token)
        user_data_url = f"{SUPABASE_URL}/rest/v1/User?id=eq.{user_id}"
        response = requests.get(user_data_url, headers=SUPABASE_HEADERS)

        user_data_response = response.json()
        if not user_data_response:
            raise HTTPException(status_code=404, detail="User not found")

        user_data = user_data_response[0]
        stripe_id = user_data.get("stripe_id")

        if not stripe_id:
            raise HTTPException(status_code=400, detail="User does not have a Stripe account")

        revenue_data = get_monthly_revenue(stripe_id)
        courtesy_data = get_courtesy_consultations(user_id)
        likes_data = get_monthly_likes(user_id)
        account_details = get_connected_account_details(stripe_id)
        payout_history = get_payout_history(stripe_id)

        return {
            "stripe_id": stripe_id,
            "available_balance": get_account_balance(stripe_id),
            "monthly_revenue": revenue_data["monthly_data"],
            "total_revenue_last_6_months": revenue_data["total_last_6_months"],
            "monthly_courtesy_consultations": courtesy_data["monthly_courtesy_consultations"],
            "monthly_likes": likes_data["monthly_likes"],
            "total_likes_last_6_months": likes_data["total_likes_last_6_months"],
            "total_followers": get_total_followers(user_id),
            "total_subscribers": get_total_subscribers(user_id),
            "account_details": account_details,
            "payout_history": payout_history,
            **get_active_subscribers(user_id, page),
        }
    except HTTPException:
        # Preserve intentional HTTP errors (401/404/400) instead of collapsing them into 500s.
        raise
    except Exception as e:
        logger.error(f"❌ Error: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))