diff --git a/.env b/.env new file mode 100644 index 0000000000000000000000000000000000000000..25c38eeff0b9d1b1dad4fe5df6c3608633e61ab6 --- /dev/null +++ b/.env @@ -0,0 +1,18 @@ +ENVIRONMENT=PROD +DATABASE_HOSTNAME=ep-royal-meadow-a4zzp6z8-pooler.us-east-1.aws.neon.tech +DATABASE_USER=neondb_owner +DATABASE_PASSWORD=npg_Kuh24FTfEsrx +DATABASE_PORT=5432 +DATABASE_DB=neondb +DATABASE_SSL_MODE=require + +CACHE_HOST=localhost +CACHE_PORT=11211 +CACHE_TTL=300 + +UVICORN_HOST=0.0.0.0 +UVICORN_PORT=7860 + +LOG_LEVEL=INFO +LOG_JSON_FORMAT=False +ROOT_PATH=/ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..09c81241f65cc87b2307d8dc732022df61d9b996 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,62 @@ +# +# Dependencies stage +# +FROM python:3.12-slim-bullseye AS deps + +ENV POETRY_VERSION 1.5.1 + +RUN apt-get update && apt-get install --no-install-recommends -y \ + gcc \ + libc-dev \ + libpq-dev \ + libpq5 \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /tmp +COPY ./pyproject.toml /tmp + +RUN pip install --no-cache-dir email-validator==2.1.0 +COPY requirements.txt . 
+RUN pip install --no-cache-dir -r requirements.txt +RUN pip install -q --no-cache-dir poetry==$POETRY_VERSION \ + && poetry lock -q -n \ + && poetry export -f requirements.txt -o /tmp/requirements.txt --without-hashes \ + && pip uninstall -y poetry \ + && pip install --no-cache-dir -q -r /tmp/requirements.txt + +# +# Base stage +# +FROM python:3.12-slim-bullseye AS base + +ENV APP_NAME MailPilot_ai_agents +ENV PREFIX /opt/MailPilot +ENV PREFIX_APP ${PREFIX}/${APP_NAME} + +ENV PYTHONUNBUFFERED 1 + +RUN groupadd -g 20001 MailPilot \ + && useradd -l -M -u 10001 -g MailPilot MailPilot + +WORKDIR ${PREFIX_APP} + +COPY ./docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh +RUN chmod +x /usr/local/bin/docker-entrypoint.sh + +RUN apt-get update && apt-get install --no-install-recommends -y libpq5 postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +COPY --from=deps /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages +COPY --from=deps /usr/local/bin /usr/local/bin +COPY . ${PREFIX_APP} + +RUN chown -R MailPilot:MailPilot ${PREFIX_APP} + +# Hugging Face specific configuration +EXPOSE 7860 +ENV UVICORN_PORT=7860 +ENV UVICORN_HOST=0.0.0.0 + +USER MailPilot + +CMD ["uvicorn", "app.main:fastapi_app", "--host", "0.0.0.0", "--port", "7860"] \ No newline at end of file diff --git a/README.md b/README.md index a97ae51fbae04622eb379341b00efec0cba9da4d..aa19a0e0cf53a7d0097a5233d09bd045ad5f7367 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,18 @@ --- -title: Mailpilot -emoji: ⚡ -colorFrom: red -colorTo: green +title: MailPilot AI Agents +emoji: 📧 +colorFrom: blue +colorTo: purple sdk: docker -pinned: false +app_port: 7860 --- -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# MailPilot AI Agents API + +FastAPI-based AI agent application for email processing and analysis. 
+ +## API Documentation + +Once deployed, API documentation will be available at: +- Swagger UI: `/docs` +- ReDoc: `/redoc` diff --git a/Spacefile b/Spacefile new file mode 100644 index 0000000000000000000000000000000000000000..3e73d657c5e215cc01272173013570d07e0abcbd --- /dev/null +++ b/Spacefile @@ -0,0 +1,2 @@ +dockerfile: Dockerfile.huggingface +port: 7860 \ No newline at end of file diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/__pycache__/__init__.cpython-312.pyc b/app/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c9c41f45bce60e8fc4e0a7539adb5da483723eb0 Binary files /dev/null and b/app/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/__pycache__/main.cpython-312.pyc b/app/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f15051de88faf1cd263a92e5e19bc3610a544753 Binary files /dev/null and b/app/__pycache__/main.cpython-312.pyc differ diff --git a/app/__pycache__/router.cpython-312.pyc b/app/__pycache__/router.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..63c842806111199240f19601742eea94fc6e50f2 Binary files /dev/null and b/app/__pycache__/router.cpython-312.pyc differ diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/api/__pycache__/__init__.cpython-312.pyc b/app/api/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45e1975ce006973bba546bf187d95adcdf3a55f4 Binary files /dev/null and b/app/api/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/api/endpoints/__init__.py b/app/api/endpoints/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/api/endpoints/__pycache__/__init__.cpython-312.pyc b/app/api/endpoints/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..111e7253074e867199232bdc878895c86f5699ca Binary files /dev/null and b/app/api/endpoints/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/api/endpoints/v1/__init__.py b/app/api/endpoints/v1/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/api/endpoints/v1/__pycache__/__init__.cpython-312.pyc b/app/api/endpoints/v1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e5436da2b2a7c75be9ebda5cf8036d1a8bfaf57 Binary files /dev/null and b/app/api/endpoints/v1/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/api/endpoints/v1/firebaseauth/__init__.py b/app/api/endpoints/v1/firebaseauth/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/api/endpoints/v1/firebaseauth/__pycache__/__init__.cpython-312.pyc b/app/api/endpoints/v1/firebaseauth/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a35a331b860fe3c6a33985c5fbb50aea8ab75118 Binary files /dev/null and b/app/api/endpoints/v1/firebaseauth/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/api/endpoints/v1/firebaseauth/__pycache__/app.cpython-312.pyc b/app/api/endpoints/v1/firebaseauth/__pycache__/app.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb580e980d490fbbc214b0c4243ab0461ec7cd69 Binary files /dev/null and b/app/api/endpoints/v1/firebaseauth/__pycache__/app.cpython-312.pyc differ diff --git a/app/api/endpoints/v1/firebaseauth/app.py b/app/api/endpoints/v1/firebaseauth/app.py new file mode 
100644 index 0000000000000000000000000000000000000000..96121b0b45476772be3bfd80e8d6c143304c9d13 --- /dev/null +++ b/app/api/endpoints/v1/firebaseauth/app.py @@ -0,0 +1,361 @@ +from fastapi import FastAPI, Depends, HTTPException, status, Request, APIRouter +from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm +import firebase_admin +from firebase_admin import credentials, auth +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from app.models.api.user import UserCreate, UserSignIn, PasswordReset, TokenVerify, UserResponse +from app.models.database.DBUser import DBUser +import datetime +import os +from app.core.database.session_manager import get_db_session as get_db +from pydantic import BaseModel, EmailStr + +router = APIRouter(prefix="/FirebaseAuth", tags=["FirebaseAuth related APIs"]) + +# Initialize Firebase Admin SDK with better error handling +try: + current_dir = os.path.dirname(os.path.abspath(__file__)) + # Try multiple possible paths for the service account file + service_account_paths = [ + "/opt/MailPilot/MailPilot_ai_agents/app/serviceAccountKey/mailpoilt-firebase-adminsdk-fbsvc-26bb455f79.json", + os.path.join(current_dir, "../serviceAccountKey/mailpoilt-firebase-adminsdk-fbsvc-26bb455f79.json"), + os.path.join(current_dir, "../../serviceAccountKey/mailpoilt-firebase-adminsdk-fbsvc-26bb455f79.json") + ] + + cred = None + for path in service_account_paths: + if os.path.exists(path): + cred = credentials.Certificate(path) + break + + if cred is None: + raise FileNotFoundError("Firebase service account key not found") + + if not firebase_admin._apps: + firebase_admin.initialize_app(cred) + +except Exception as e: + print(f"Firebase initialization error: {str(e)}") + # Continue without crashing, but auth functions will fail + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/FirebaseAuth/signin") +async def get_current_user(token: str = Depends(oauth2_scheme), db: AsyncSession = Depends(get_db)): + try: 
+ decoded_token = auth.verify_id_token(token) + user_id = decoded_token["uid"] + + # Get the Firebase user + firebase_user = auth.get_user(user_id) + + result = await db.execute(select(DBUser).filter(DBUser.firebase_uid == user_id)) + db_user = result.scalar_one_or_none() + + if db_user is None: + raise HTTPException(status_code=404, detail="User not found in database") + + return UserResponse( + firebase_uid=db_user.firebase_uid, + email=db_user.email, + display_name=db_user.display_name, + is_active=db_user.is_active, + created_at=db_user.created_at, + last_login=db_user.last_login, + provider=db_user.provider, + email_verified=firebase_user.email_verified + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=f"Invalid authentication credentials: {str(e)}", + headers={"WWW-Authenticate": "Bearer"}, + ) + +@router.post("/signup", response_model=dict) +async def create_user(user_data: UserCreate, db: AsyncSession = Depends(get_db)): + """Create a new user with email and password and store in database""" + try: + # Check if user already exists + try: + existing_user = auth.get_user_by_email(user_data.email) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"User with email {user_data.email} already exists" + ) + except auth.UserNotFoundError: + # This is what we want - user doesn't exist yet + pass + + # Create Firebase user + firebase_user = auth.create_user( + email=user_data.email, + password=user_data.password, + display_name=user_data.display_name, + email_verified=False # Explicitly set to false + ) + + # Generate email verification link + action_code_settings = auth.ActionCodeSettings( + url=f"https://mailpoilt.web.app/verify-email?email={user_data.email}", + handle_code_in_app=True + ) + verification_link = auth.generate_email_verification_link( + user_data.email, + action_code_settings + ) + + # Firebase will handle sending the verification email automatically + + current_time = 
datetime.datetime.utcnow() + + db_user = DBUser( + firebase_uid=firebase_user.uid, + email=user_data.email, + display_name=user_data.display_name, + is_active=True, + created_at=current_time, + last_login=current_time, + provider="email" + ) + + db.add(db_user) + await db.commit() + await db.refresh(db_user) + + return { + "message": "User created successfully. Please check your email to verify your account.", + "verification_link": verification_link, # In production, you might not return this + "user": { + "firebase_uid": db_user.firebase_uid, + "email": db_user.email, + "display_name": db_user.display_name, + "is_active": db_user.is_active, + "created_at": db_user.created_at.isoformat() if db_user.created_at else None, + "last_login": db_user.last_login.isoformat() if db_user.last_login else None, + "provider": db_user.provider, + "email_verified": firebase_user.email_verified + } + } + except Exception as e: + await db.rollback() + try: + if 'firebase_user' in locals(): + auth.delete_user(firebase_user.uid) + except: + pass + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Error creating user: {str(e)}" + ) + +@router.post("/signin", response_model=dict) +async def signin_user(user_data: UserSignIn, db: AsyncSession = Depends(get_db)): + """Sign in a user with email and password""" + try: + try: + firebase_user = auth.get_user_by_email(user_data.email) + except auth.UserNotFoundError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=f"No user found with email: {user_data.email}" + ) + + # Generate a custom token that can be exchanged for an ID token + custom_token = auth.create_custom_token(firebase_user.uid) + + # Update last login time + result = await db.execute(select(DBUser).filter(DBUser.firebase_uid == firebase_user.uid)) + db_user = result.scalar_one_or_none() + + if not db_user: + # Create db user if not exists + db_user = DBUser( + firebase_uid=firebase_user.uid, + email=firebase_user.email, + 
display_name=firebase_user.display_name or user_data.email.split('@')[0], + is_active=True, + created_at=datetime.datetime.utcnow(), + last_login=datetime.datetime.utcnow(), + provider="email" + ) + db.add(db_user) + else: + db_user.last_login = datetime.datetime.utcnow() + + await db.commit() + await db.refresh(db_user) + + user_info = { + "firebase_uid": db_user.firebase_uid, + "email": db_user.email, + "display_name": db_user.display_name, + "is_active": db_user.is_active, + "created_at": db_user.created_at.isoformat() if db_user.created_at else None, + "last_login": db_user.last_login.isoformat() if db_user.last_login else None, + "provider": db_user.provider, + "email_verified": firebase_user.email_verified, + "custom_token": custom_token.decode("utf-8") if isinstance(custom_token, bytes) else custom_token + } + + return { + "message": "Login successful", + "user": user_info, + "custom_token": custom_token.decode("utf-8") if isinstance(custom_token, bytes) else custom_token, + "email_verified": firebase_user.email_verified + } + except Exception as e: + if isinstance(e, HTTPException): + raise e + await db.rollback() + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=f"Authentication failed: {str(e)}" + ) +class EmailVerifyRequest(BaseModel): + email: EmailStr + +@router.post("/resend-verification", status_code=status.HTTP_200_OK) +async def resend_verification_email( + email_data: EmailVerifyRequest = None, + current_user: UserResponse = Depends(get_current_user) +): + """ + Resend verification email to a user + + If user is logged in, uses their email. + Otherwise, uses the email provided in the request body. 
+ """ + try: + # If email is provided in request body, use that + # Otherwise use logged in user's email + email = email_data.email if email_data else current_user.email + + # Check if user exists + try: + firebase_user = auth.get_user_by_email(email) + except auth.UserNotFoundError: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"No user found with email: {email}" + ) + + # Check if email is already verified + if firebase_user.email_verified: + return {"message": "Email is already verified"} + + # Generate a new verification link + action_code_settings = auth.ActionCodeSettings( + url=f"https://mailpoilt.web.app/verify-email?email={email}", + handle_code_in_app=True + ) + verification_link = auth.generate_email_verification_link( + email, + action_code_settings + ) + + return { + "message": "Verification email sent successfully", + "verification_link": verification_link + } + except Exception as e: + if isinstance(e, HTTPException): + raise e + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Failed to resend verification email: {str(e)}" + ) + + + email: EmailStr + +@router.post("/check-email-verified") +async def check_email_verified(email_data: EmailVerifyRequest): + """Check if a user's email is verified""" + try: + # Check if user exists + try: + firebase_user = auth.get_user_by_email(email_data.email) + except auth.UserNotFoundError: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"No user found with email: {email_data.email}" + ) + + return { + "email": email_data.email, + "email_verified": firebase_user.email_verified + } + except Exception as e: + if isinstance(e, HTTPException): + raise e + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Failed to check email verification status: {str(e)}" + ) + +@router.post("/verify-token", response_model=UserResponse) +async def verify_token(token_data: TokenVerify, db: AsyncSession = Depends(get_db)): + """Verify 
a Firebase ID token or UID and return user data""" + try: + # First try to verify as an ID token + try: + decoded_token = auth.verify_id_token(token_data.token) + user_id = decoded_token["uid"] + except: + # If that fails, treat it as a UID + user_id = token_data.token + + try: + firebase_user = auth.get_user(user_id) + except auth.UserNotFoundError: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="User not found" + ) + + result = await db.execute(select(DBUser).filter(DBUser.firebase_uid == user_id)) + db_user = result.scalar_one_or_none() + + if not db_user: + # Create DB user if it doesn't exist + db_user = DBUser( + firebase_uid=user_id, + email=firebase_user.email, + display_name=firebase_user.display_name or firebase_user.email.split('@')[0], + is_active=True, + created_at=datetime.datetime.utcnow(), + last_login=datetime.datetime.utcnow(), + provider="firebase" + ) + db.add(db_user) + await db.commit() + await db.refresh(db_user) + else: + # Update last_login time + db_user.last_login = datetime.datetime.utcnow() + await db.commit() + await db.refresh(db_user) + + return UserResponse( + firebase_uid=db_user.firebase_uid, + email=db_user.email, + display_name=db_user.display_name, + is_active=db_user.is_active, + created_at=db_user.created_at, + last_login=db_user.last_login, + provider=db_user.provider, + email_verified=firebase_user.email_verified + ) + except Exception as e: + if isinstance(e, HTTPException): + raise e + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=f"Token verification failed: {str(e)}" + ) +@router.post("/token") +async def get_token(form_data: OAuth2PasswordRequestForm = Depends(), db: AsyncSession = Depends(get_db)): + return await signin_user( + UserSignIn(email=form_data.username, password=form_data.password), + db + ) diff --git a/app/api/endpoints/v1/login/__init__.py b/app/api/endpoints/v1/login/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/api/endpoints/v1/login/__pycache__/__init__.cpython-312.pyc b/app/api/endpoints/v1/login/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c5c460a7213a7788d3216c1b72dc682ce45629d Binary files /dev/null and b/app/api/endpoints/v1/login/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/api/endpoints/v1/login/__pycache__/api.cpython-312.pyc b/app/api/endpoints/v1/login/__pycache__/api.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..161ad2ea070e06bf2f6c5dfc5661b80d3b644870 Binary files /dev/null and b/app/api/endpoints/v1/login/__pycache__/api.cpython-312.pyc differ diff --git a/app/api/endpoints/v1/login/api.py b/app/api/endpoints/v1/login/api.py new file mode 100644 index 0000000000000000000000000000000000000000..5355ab004c4f68e6ffa4e84d77347ce7fa229b40 --- /dev/null +++ b/app/api/endpoints/v1/login/api.py @@ -0,0 +1,10 @@ +from fastapi import APIRouter, Depends , HTTPException +from sqlalchemy.ext.asyncio import AsyncSession +from app.core.database.session_manager import get_db_session as db_session + +router = APIRouter(prefix="/login", tags=["login related APIs"]) + + +@router.post("/login") +async def home(): + return {"message": "Welcome to the Simple Router!"} \ No newline at end of file diff --git a/app/core/__init__.py b/app/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/core/__pycache__/__init__.cpython-312.pyc b/app/core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..390444fa9a6097b510978fae5103ffb997b3b30e Binary files /dev/null and b/app/core/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/core/__pycache__/config.cpython-312.pyc b/app/core/__pycache__/config.cpython-312.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..d607bc857e8173a4a3366cf22140f2770e4e6916 Binary files /dev/null and b/app/core/__pycache__/config.cpython-312.pyc differ diff --git a/app/core/__pycache__/logger.cpython-312.pyc b/app/core/__pycache__/logger.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..40a9f4d2e0606fb2c4bc66a35e28698c4cecaeef Binary files /dev/null and b/app/core/__pycache__/logger.cpython-312.pyc differ diff --git a/app/core/cache/__init__.py b/app/core/cache/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/core/cache/cache.py b/app/core/cache/cache.py new file mode 100644 index 0000000000000000000000000000000000000000..831fd378f9d29763e9f68e5f8d0d9a941c8c9c3e --- /dev/null +++ b/app/core/cache/cache.py @@ -0,0 +1,112 @@ +import pickle +from logging import Logger + +from pymemcache.client.base import Client + +from app.core.config import settings +from app.core.exceptions.base_exception import ( + ConnectionException, + CouldNotEditMemcache, + KeyNotFoundException, +) + + +class Cache: + """ + A generic cache class for interacting with Memcached. + """ + + def __init__(self, logger: Logger) -> None: + """ + Initialize the cache connection. + + :param logger: Logger instance for logging operations + """ + + # Load Memcache config from .env + self.host = settings.CACHE_HOST + self.port = settings.CACHE_PORT + self.default_ttl = settings.CACHE_TTL + + # logger must be set before _initialize_connection(), which logs via self.logger + self.logger = logger + self.client = self._initialize_connection() + + def _initialize_connection(self): + """ + Establish a connection to the Memcached server. 
+ + :return: Client instance + :raises ConnectionException: If the connection cannot be established + """ + client = Client((self.host, self.port)) + if client: + self.logger.info(f"Connected to Memcached at {self.host}: {self.port}") + return client + else: + raise ConnectionException("Could not connect to Memcached server.") + + def add(self, key: str, value: dict): + """ + Add an item to the cache. + + :param key: Cache key + :param value: Value to store (serialized using pickle) + :raises CouldNotEditMemcache: If the item could not be added + """ + serialized_value = pickle.dumps(value) + res = self.client.add(key, serialized_value, expire=self.default_ttl) + if not res: + raise CouldNotEditMemcache(f"Could not add key {key} to cache.") + self.logger.info(f"Added {key} to cache.") + + def get(self, key: str): + """ + Retrieve an item from the cache. + + :param key: Cache key + :return: Deserialized value + :raises KeyNotFoundException: If the key is not found in the cache + """ + byte_string = self.get_raw(key) + return pickle.loads(byte_string) + + def get_raw(self, key: str): + """ + Retrieve the raw byte string from the cache. + + :param key: Cache key + :return: Raw byte string + :raises KeyNotFoundException: If the key is not found in the cache + """ + byte_string = self.client.get(key) + if not byte_string: + raise KeyNotFoundException(f"Key {key} not found in cache.") # noqa: E713 + return byte_string + + def delete(self, key: str): + """ + Delete an item from the cache. + + :param key: Cache key + :return: Result of the delete operation + :raises CouldNotEditMemcache: If the item could not be deleted + """ + res = self.client.delete(key) + if not res: + raise CouldNotEditMemcache(f"Could not delete key {key} from cache.") + self.logger.info(f"Deleted {key} from cache.") + return res + + def update(self, key: str, value: dict): + """ + Update an item in the cache. 
+ + :param key: Cache key + :param value: New value to store (serialized using pickle) + :raises CouldNotEditMemcache: If the item could not be updated + """ + serialized_value = pickle.dumps(value) + res = self.client.set(key, serialized_value, expire=self.default_ttl) + if not res: + raise CouldNotEditMemcache(f"Could not update key {key} in cache.") + self.logger.info(f"Updated {key} in cache.") diff --git a/app/core/config.py b/app/core/config.py new file mode 100644 index 0000000000000000000000000000000000000000..44a3561c23f87b1d25ca76f5c429a65a1a96d903 --- /dev/null +++ b/app/core/config.py @@ -0,0 +1,97 @@ +""" +File with environment variables and general configuration logic. +Environment variables are loaded from `.env`, with default values as fallback. + +For project metadata, pyproject.toml is used. +Complex types like lists are read as JSON-encoded strings. +""" + +import tomllib +from pathlib import Path +from typing import Literal +from urllib.parse import quote_plus + +from environs import Env +from pydantic import validator +from pydantic_settings import BaseSettings +from structlog.stdlib import BoundLogger + + +from app.core.logger import Logger + +PROJECT_DIR = Path(__file__).parent.parent.parent +with open(f"{PROJECT_DIR}/pyproject.toml", "rb") as f: + PYPROJECT_CONTENT = tomllib.load(f)["tool"]["poetry"] + +env = Env() +env.read_env() + +CORS_ALLOWED_HEADERS = list(map(str.strip, env.list("CORS_ALLOWED_HEADERS", ["*"]))) +CORS_ORIGINS = list(map(str.strip, env.list("CORS_ORIGINS", ["http://localhost:3000"]))) + + +class Settings(BaseSettings): + # CORE SETTINGS + ENVIRONMENT: Literal["DEV", "STG", "PROD"] = env.str("ENVIRONMENT", "DEV").upper() + + # CORS SETTINGS + # BACKEND_CORS_ORIGINS: list[str] = env.list("BACKEND_CORS_ORIGINS", ["http://localhost:3000"]) + # BACKEND_CORS_HEADERS: list[str] = env.list("BACKEND_CORS_HEADERS", ["*"]) + # ALLOWED_HOSTS: list[str] = env.list("ALLOWED_HOSTS", ["*"]) + + # LOG SETTINGS + LOG_LEVEL: 
Literal["INFO", "DEBUG", "WARN", "ERROR"] = env.str("LOG_LEVEL", "INFO") + LOG_JSON_FORMAT: bool = env.bool("LOG_JSON_FORMAT", False) + + # PROJECT NAME, VERSION AND DESCRIPTION + PROJECT_NAME: str = PYPROJECT_CONTENT["name"] + VERSION: str = PYPROJECT_CONTENT["version"] + DESCRIPTION: str = PYPROJECT_CONTENT["description"] + + ROOT_PATH: str = env.str("ROOT_PATH", "") + + # DOCS SETTINGS + DOCS_URL: str = f"{ROOT_PATH}/docs" + OPENAPI_URL: str = f"{ROOT_PATH}/openapi.json" + + # POSTGRESQL DATABASE SETTINGS + DATABASE_HOSTNAME: str = env.str("DATABASE_HOSTNAME") + DATABASE_USER: str = env.str("DATABASE_USER") + DATABASE_PASSWORD: str = env.str("DATABASE_PASSWORD") + DATABASE_PORT: str = env.str("DATABASE_PORT", "5432") + DATABASE_DB: str = env.str("DATABASE_DB") + SQLALCHEMY_DATABASE_URI: str = "" + + @validator("SQLALCHEMY_DATABASE_URI") + def _assemble_db_connection(cls, v: str, values: dict[str, str]) -> str: + return "postgresql+asyncpg://{}:{}@{}:{}/{}".format( + values["DATABASE_USER"], + quote_plus(values["DATABASE_PASSWORD"]), + values["DATABASE_HOSTNAME"], + values["DATABASE_PORT"], + values["DATABASE_DB"], + ) + + # UVICORN SETTINGS + UVICORN_HOST: str = env.str("UVICORN_HOST", "0.0.0.0") + UVICORN_PORT: int = env.int("UVICORN_PORT", 5001) + + CACHE_HOST: str = env.str("CACHE_HOST", "localhost") + CACHE_PORT: int = env.int("CACHE_PORT", 11211) + CACHE_TTL: int = env.int("CACHE_TTL", 300) + + BEDROCK_MODEL_ID: str = env.str("BEDROCK_MODEL_ID", "anthropic.claude-v2") + BEDROCK_PROVIDER: str = env.str("BEDROCK_PROVIDER", "anthropic") + AWS_ACCESS_KEY: str = env.str("AWS_ACCESS_KEY", "") + AWS_SECRET_KEY: str = env.str("AWS_SECRET_KEY", "") + AWS_REGION: str = env.str("AWS_REGION", "us-east-1") + + TOKENIZER_MODEL: str = env.str("TOKENIZER_MODEL") + TOKEN_LIMIT_PER_REQUEST: int = env.int("TOKEN_LIMIT_PER_REQUEST", 20000) + + +settings: Settings = Settings() # type: ignore + +log: BoundLogger = Logger( + json_logs=settings.LOG_JSON_FORMAT, 
log_level=settings.LOG_LEVEL +).setup_logging() \ No newline at end of file diff --git a/app/core/database/__init__.py b/app/core/database/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/core/database/__pycache__/__init__.cpython-312.pyc b/app/core/database/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..87f00802841d883366b05571b5ea8d54533ffabc Binary files /dev/null and b/app/core/database/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/core/database/__pycache__/session_manager.cpython-312.pyc b/app/core/database/__pycache__/session_manager.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b34114b75293cf2cbaccfe918b494258ecc0f7e6 Binary files /dev/null and b/app/core/database/__pycache__/session_manager.cpython-312.pyc differ diff --git a/app/core/database/session_manager.py b/app/core/database/session_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..a6127214edaa7cd326042a396608f2dc777abf0c --- /dev/null +++ b/app/core/database/session_manager.py @@ -0,0 +1,64 @@ +import contextlib +from collections.abc import AsyncIterator + +from sqlalchemy.ext.asyncio import ( + AsyncConnection, + AsyncSession, + async_sessionmaker, + create_async_engine, +) +from sqlalchemy.orm import declarative_base + +from app.core.config import settings + +Base = declarative_base() + +# Inspiration https://praciano.com.br/fastapi-and-async-sqlalchemy-20-with-pytest-done-right.html + + +class DatabaseSessionManager: + def __init__(self): + self._engine = create_async_engine(settings.SQLALCHEMY_DATABASE_URI) + self._sessionmaker = async_sessionmaker(autocommit=False, bind=self._engine) + + async def close(self): + if self._engine is None: + raise Exception("DatabaseSessionManager is not initialized") + await self._engine.dispose() + + self._engine = None + 
self._sessionmaker = None + + @contextlib.asynccontextmanager + async def connect(self) -> AsyncIterator[AsyncConnection]: + if self._engine is None: + raise Exception("DatabaseSessionManager is not initialized") + + async with self._engine.begin() as connection: + try: + yield connection + except Exception: + await connection.rollback() + raise + + @contextlib.asynccontextmanager + async def session(self) -> AsyncIterator[AsyncSession]: + if self._sessionmaker is None: + raise Exception("DatabaseSessionManager is not initialized") + + session = self._sessionmaker() + try: + yield session + except Exception: + await session.rollback() + raise + finally: + await session.close() + + +sessionmanager = DatabaseSessionManager() + + +async def get_db_session(): + async with sessionmanager.session() as session: + yield session diff --git a/app/core/logger.py b/app/core/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..a80fc7f8ef88d367d777caba4b97373920f33462 --- /dev/null +++ b/app/core/logger.py @@ -0,0 +1,181 @@ +import logging +import os +import sys +from logging.handlers import TimedRotatingFileHandler + +import structlog +from dotenv import load_dotenv +from structlog.processors import CallsiteParameter +from structlog.stdlib import BoundLogger +from structlog.typing import EventDict, Processor + +# Load environment variables +load_dotenv() + + +class Logger: + """ + Configure and setup logging with Structlog. + + Args: + json_logs (bool, optional): Whether to log in JSON format. Defaults to False. + log_level (str, optional): Minimum log level to display. Defaults to "INFO". 
+ """ + + def __init__(self, json_logs: bool = False, log_level: str = "INFO"): + self.json_logs = json_logs + self.log_level = log_level.upper() + + self.environment = os.getenv("ENVIRONMENT", "PROD").upper() # Default to PROD + self.log_file_path = os.getenv( + "LOG_FILE_PATH", self._get_default_log_file_path() + ) + + def _get_default_log_file_path(self) -> str | None: + """ + Provides a default log file path outside the project folder. + + Returns: + str: The default log file path. + """ + return + # default_log_dir = os.path.expanduser("./logs") + # if not os.path.exists(default_log_dir): + # os.makedirs(default_log_dir) + # return os.path.join(default_log_dir, "app.log") + + def _rename_event_key(self, _, __, event_dict: EventDict) -> EventDict: + """ + Renames the 'event' key to 'message' in log entries. + """ + event_dict["message"] = event_dict.pop("event", "") + return event_dict + + def _drop_color_message_key(self, _, __, event_dict: EventDict) -> EventDict: + """ + Removes the 'color_message' key from log entries. + """ + event_dict.pop("color_message", None) + return event_dict + + def _get_processors(self) -> list[Processor]: + """ + Returns a list of structlog processors based on the specified configuration. + """ + processors: list[Processor] = [ + structlog.contextvars.merge_contextvars, + structlog.stdlib.add_logger_name, + structlog.stdlib.add_log_level, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.stdlib.ExtraAdder(), + self._drop_color_message_key, + structlog.processors.TimeStamper(fmt="iso"), + structlog.processors.StackInfoRenderer(), + structlog.processors.CallsiteParameterAdder( + [ + CallsiteParameter.FILENAME, + CallsiteParameter.FUNC_NAME, + CallsiteParameter.LINENO, + ], + ), + ] + + if self.json_logs: + processors.append(self._rename_event_key) + processors.append(structlog.processors.format_exc_info) + + return processors + + def _clear_uvicorn_loggers(self): + """ + Clears the log handlers for uvicorn loggers. 
+ """ + for _log in ["uvicorn", "uvicorn.error", "uvicorn.access"]: + logging.getLogger(_log).handlers.clear() + logging.getLogger(_log).propagate = True + + def _configure_structlog(self, processors: list[Processor]): + """ + Configures structlog with the specified processors. + """ + structlog.configure( + processors=processors + + [ + structlog.stdlib.ProcessorFormatter.wrap_for_formatter, + ], + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, + ) + + def _configure_logging(self, processors: list[Processor]) -> logging.Logger: + """ + Configures logging with the specified processors based on the environment. + + Returns: + logging.Logger: The configured root logger. + """ + formatter = structlog.stdlib.ProcessorFormatter( + foreign_pre_chain=processors, + processors=[ + structlog.stdlib.ProcessorFormatter.remove_processors_meta, + structlog.processors.JSONRenderer() + if self.json_logs + else structlog.dev.ConsoleRenderer(colors=True), + ], + ) + + root_logger = logging.getLogger() + root_logger.handlers.clear() # Clear existing handlers + + if self.environment == "DEV": + # Console logging for development + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(formatter) + root_logger.addHandler(stream_handler) + else: + # File logging for production + file_handler = TimedRotatingFileHandler( + filename=self.log_file_path, + when="midnight", + interval=1, + backupCount=7, + encoding="utf-8", + ) + file_handler.setFormatter(formatter) + root_logger.addHandler(file_handler) + + root_logger.setLevel(self.log_level.upper()) + return root_logger + + def _configure(self): + """ + Configures logging and structlog, and sets up exception handling. 
+ """ + shared_processors: list[Processor] = self._get_processors() + self._configure_structlog(shared_processors) + root_logger = self._configure_logging(shared_processors) + self._clear_uvicorn_loggers() + + def handle_exception(exc_type, exc_value, exc_traceback): + """ + Logs uncaught exceptions. + """ + if issubclass(exc_type, KeyboardInterrupt): + sys.__excepthook__(exc_type, exc_value, exc_traceback) + return + + root_logger.error( + "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback) + ) + + sys.excepthook = handle_exception + + def setup_logging(self) -> BoundLogger: + """ + Sets up logging configuration for the application. + + Returns: + BoundLogger: The configured logger instance. + """ + self._configure() + return structlog.get_logger() \ No newline at end of file diff --git a/app/core/middlewares/__init__.py b/app/core/middlewares/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5e20b04c8284613800a5d2d1b4e1238458b6a227 --- /dev/null +++ b/app/core/middlewares/__init__.py @@ -0,0 +1,20 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from starlette.middleware.base import BaseHTTPMiddleware + +from app.core.config import CORS_ALLOWED_HEADERS, CORS_ORIGINS +from app.core.middlewares.execution_middleware import measure_execution_time + + +def add_middlewares(app: FastAPI) -> None: + """ + Wrap FastAPI application, with various of middlewares + """ + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # For development only. 
In production, use specific origins + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + app.add_middleware(BaseHTTPMiddleware, dispatch=measure_execution_time) diff --git a/app/core/middlewares/__pycache__/__init__.cpython-312.pyc b/app/core/middlewares/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5f76d22446b9862fb73af17890b4e5b0aef6f43 Binary files /dev/null and b/app/core/middlewares/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/core/middlewares/__pycache__/execution_middleware.cpython-312.pyc b/app/core/middlewares/__pycache__/execution_middleware.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ded81cc41a6c2d5aa90edafeb78b1033a55a5957 Binary files /dev/null and b/app/core/middlewares/__pycache__/execution_middleware.cpython-312.pyc differ diff --git a/app/core/middlewares/execution_middleware.py b/app/core/middlewares/execution_middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..78b18c3424af1d24c2dec8611b1e99cbf92af689 --- /dev/null +++ b/app/core/middlewares/execution_middleware.py @@ -0,0 +1,22 @@ + +import time + +from fastapi import Request + +from app.core.config import log + + +async def measure_execution_time(request: Request, call_next): + start_time = time.time() + response = await call_next(request) + process_time = time.time() - start_time + response.headers["X-Process-Time"] = f"{process_time:.2f} s" # noqa: E231 + + log_dict = { + "url": request.url.path, + "method": request.method, + "process_time": process_time, + } + log.info(log_dict, extra=log_dict) + + return response \ No newline at end of file diff --git a/app/llm/llm_interface.py b/app/llm/llm_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..c31d532fda1aad2dba7b0f47db7d13cb504e695f --- /dev/null +++ b/app/llm/llm_interface.py @@ -0,0 +1,14 @@ +from abc import ABC, abstractmethod +from 
langchain_core.messages import BaseMessage + + +class LLMInterface(ABC): + @abstractmethod + def query(self, messages: list[BaseMessage]) -> BaseMessage: + """Query the LLM with a list of messages""" + pass + + @abstractmethod + async def aquery(self, messages: list[BaseMessage]) -> BaseMessage: + """Asynchronously query the LLM with a list of messages""" + pass \ No newline at end of file diff --git a/app/llm/provider/bedrock_provider.py b/app/llm/provider/bedrock_provider.py new file mode 100644 index 0000000000000000000000000000000000000000..5330a5c7194f881d4f3213621d8ade2cb561d639 --- /dev/null +++ b/app/llm/provider/bedrock_provider.py @@ -0,0 +1,63 @@ +import threading +from langchain_core.messages import BaseMessage +from langchain_aws import ChatBedrock + +from app.llm.token.token_manager import TokenManager +from app.core.config import settings +from app.llm.llm_interface import LLMInterface + + +class BedrockProvider(LLMInterface): + _instance = None + _lock = threading.Lock() + token_manager = TokenManager(token_limit=50000, reset_interval=30) + + def __new__(cls): + if cls._instance is None: + with cls._lock: + if cls._instance is None: + cls._instance = super().__new__(cls) + cls._instance._initialized = False + return cls._instance + + def __init__(self): + if not self._initialized: + self.model_id = settings.BEDROCK_MODEL_ID + self.aws_access_key = settings.AWS_ACCESS_KEY + self.aws_secret_key = settings.AWS_SECRET_KEY + self.aws_region = settings.AWS_REGION + self.provider = settings.BEDROCK_PROVIDER + + # Initialize BedrockChat + self.llm = ChatBedrock( + model_id=self.model_id, + region_name=self.aws_region, + aws_access_key_id=self.aws_access_key, + aws_secret_access_key=self.aws_secret_key, + provider=self.provider, + streaming=False, + model_kwargs={ + "temperature": 0.7, + "max_tokens": 2000 + } + ) + + self._initialized = True + + def query(self, messages: list[BaseMessage]) -> BaseMessage: + """Query AWS Bedrock with messages""" + response = 
self.llm.invoke(messages) + self._track_tokens(response) + return response + + async def aquery(self, messages: list[BaseMessage]) -> BaseMessage: + """Asynchronous query method""" + response = await self.llm.ainvoke(messages) + self._track_tokens(response) + return response + + def _track_tokens(self, response: BaseMessage) -> None: + """Helper to track token usage""" + token_usage = response.response_metadata.get("token_usage", {}) if hasattr(response, "response_metadata") else {} + total_tokens = token_usage.get("total_tokens", 0) + self.token_manager.track_tokens(total_tokens) \ No newline at end of file diff --git a/app/llm/token/token_manager.py b/app/llm/token/token_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..2fd8c6235a3448e7248d032386ce121d6f4a7ad8 --- /dev/null +++ b/app/llm/token/token_manager.py @@ -0,0 +1,37 @@ +import time +from threading import Lock + + +class TokenManager: + def __init__(self, token_limit: int = 50000, reset_interval: int = 30): + self.token_limit = token_limit + self.reset_interval = reset_interval + self.token_count = 0 + self.last_reset = time.time() + self.lock = Lock() + + def track_tokens(self, tokens: int) -> None: + """ + Track token usage and reset if needed + """ + with self.lock: + current_time = time.time() + if current_time - self.last_reset > self.reset_interval: + self.token_count = 0 + self.last_reset = current_time + + self.token_count += tokens + if self.token_count > self.token_limit: + print(f"Warning: Token limit of {self.token_limit} exceeded!") + + def get_token_usage(self) -> int: + """ + Get current token usage + """ + with self.lock: + current_time = time.time() + if current_time - self.last_reset > self.reset_interval: + self.token_count = 0 + self.last_reset = current_time + + return self.token_count \ No newline at end of file diff --git a/app/main.py b/app/main.py new file mode 100644 index 
0000000000000000000000000000000000000000..6d7c03b3923010b49bba43c87c808d8385dabf1b --- /dev/null +++ b/app/main.py @@ -0,0 +1,34 @@ +"""Main FastAPI app instance declaration.""" +import fastapi +import structlog +import uvicorn + +from app.core.config import settings +from .core.middlewares import add_middlewares +from app.router import api_router + +# Set up structlog for logging +logger = structlog.get_logger() + + +fastapi_app = fastapi.FastAPI( + title=settings.PROJECT_NAME, + version=settings.VERSION, + description=settings.DESCRIPTION, + openapi_url=settings.OPENAPI_URL, + docs_url=settings.DOCS_URL, +) +fastapi_app.include_router(api_router) +add_middlewares(fastapi_app) +# Log the app startup +logger.info( + "Application started", project=settings.PROJECT_NAME, version=settings.VERSION +) + +if __name__ == "__main__": + uvicorn.run( + "main:fastapi_app", + host=settings.UVICORN_HOST, + port=settings.UVICORN_PORT, + reload=True, + ) diff --git a/app/migrations/__init__.py b/app/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/migrations/__pycache__/env.cpython-312.pyc b/app/migrations/__pycache__/env.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9d315a9764c574f554c1af7e4eb2a9e0407c3d3 Binary files /dev/null and b/app/migrations/__pycache__/env.cpython-312.pyc differ diff --git a/app/migrations/alembic.ini b/app/migrations/alembic.ini new file mode 100644 index 0000000000000000000000000000000000000000..0a9c920de9c7189867a8132f88b3dceeb6b38c39 --- /dev/null +++ b/app/migrations/alembic.ini @@ -0,0 +1,101 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = /opt/MailPilot/MailPilot_ai_agents/app/migrations + +# template used to generate migration files +file_template = %%(year)d%%(month).2d%%(day).2d%%(minute).2d_%%(slug)s_%%(rev)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. Valid values are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # default: use os.pathsep + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +hooks = black + +black.type = console_scripts +black.entrypoint = black +black.options = REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S \ No newline at end of file diff --git a/app/migrations/env.py b/app/migrations/env.py new file mode 100644 index 0000000000000000000000000000000000000000..61dad4ab22591d3868773d77857cacda6a40ef43 --- /dev/null +++ b/app/migrations/env.py @@ -0,0 +1,96 @@ +import asyncio +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool +from sqlalchemy.ext.asyncio import AsyncEngine + +from app.core import config as app_config + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) # type: ignore + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +from app.models.database.base import Base # noqa + +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. 
+ + +def get_database_uri(): + return app_config.settings.SQLALCHEMY_DATABASE_URI + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = get_database_uri() + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + compare_type=True, + compare_server_default=True, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection): + context.configure( + connection=connection, target_metadata=target_metadata, compare_type=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + configuration = config.get_section(config.config_ini_section) + assert configuration + configuration["sqlalchemy.url"] = get_database_uri() + connectable = AsyncEngine( + engine_from_config( + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, + future=True, + ) # type: ignore + ) + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + asyncio.run(run_migrations_online()) \ No newline at end of file diff --git a/app/migrations/script.py.mako b/app/migrations/script.py.mako new file mode 100644 index 0000000000000000000000000000000000000000..1e4564e5e8a8dbd10a70fb2d8b87e3af72172925 --- /dev/null +++ b/app/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} \ No newline at end of file diff --git a/app/migrations/utils.py b/app/migrations/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9b00b3ece11b53d0520a7611098d351a2da072b7 --- /dev/null +++ b/app/migrations/utils.py @@ -0,0 +1,24 @@ +from alembic import op +from sqlalchemy import text +from sqlalchemy.engine import reflection + + +def table_has_column(table: str, column: str): + if not hasattr(table_has_column, "inspection"): + conn = op.get_bind() + insp = table_has_column.inspection = reflection.Inspector.from_engine(conn) + else: + insp = table_has_column.inspection + has_column = False + for col in insp.get_columns(table): + if column not in col["name"]: + continue + has_column = True + return has_column + + +def table_exists(table): + conn = op.get_bind() + inspector = reflection.Inspector.from_engine(conn) + tables = inspector.get_table_names() + return table in tables \ No newline at end of file diff --git a/app/migrations/versions/2025041655_new_migration_0c372b179073.py b/app/migrations/versions/2025041655_new_migration_0c372b179073.py new file mode 100644 index 0000000000000000000000000000000000000000..ed6d630e82a05dfd0bd3c200441c2284f3eb7d8f --- /dev/null +++ b/app/migrations/versions/2025041655_new_migration_0c372b179073.py @@ -0,0 +1,32 @@ +"""new migration + +Revision ID: 0c372b179073 +Revises: +Create Date: 2025-04-16 14:55:45.297069 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision = '0c372b179073' +down_revision = None +branch_labels = None +depends_on = None + +def upgrade(): + op.create_table( + "users", + sa.Column("firebase_uid", sa.String(), nullable=False), + sa.Column("email", sa.String(), nullable=False), + sa.Column("display_name", sa.String(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False, server_default='true'), + sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')), + sa.Column("last_login", sa.DateTime(), nullable=True), + sa.Column("provider", sa.String(), nullable=False, server_default='email'), + sa.PrimaryKeyConstraint("firebase_uid"), + sa.UniqueConstraint("email"), + ) + +def downgrade(): + op.drop_table("users") diff --git a/app/migrations/versions/__pycache__/2025041655_new_migration_0c372b179073.cpython-312.pyc b/app/migrations/versions/__pycache__/2025041655_new_migration_0c372b179073.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..12f7d6eae536b0f5c0efc947dca7938fd4e539b5 Binary files /dev/null and b/app/migrations/versions/__pycache__/2025041655_new_migration_0c372b179073.cpython-312.pyc differ diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/models/__pycache__/__init__.cpython-312.pyc b/app/models/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b3b86f608816a26d37d3ef524cddf003e2658ca Binary files /dev/null and b/app/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/models/api/__init__.py b/app/models/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/models/api/__pycache__/__init__.cpython-312.pyc b/app/models/api/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..6c2e39754d56077a7a03e5dab039d3eafa048f1e Binary files /dev/null and b/app/models/api/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/models/api/__pycache__/user.cpython-312.pyc b/app/models/api/__pycache__/user.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b591021b3226618871f4c59fc0bc87efbfe3a61 Binary files /dev/null and b/app/models/api/__pycache__/user.cpython-312.pyc differ diff --git a/app/models/api/user.py b/app/models/api/user.py new file mode 100644 index 0000000000000000000000000000000000000000..7968d72a30b6c58ad355ee1813f66237e5385cdb --- /dev/null +++ b/app/models/api/user.py @@ -0,0 +1,29 @@ + +from pydantic import BaseModel +from typing import Optional +import datetime + +# Pydantic Models +class UserCreate(BaseModel): + email: str + password: str + display_name: Optional[str] = None + +class UserSignIn(BaseModel): + email: str + password: str + +class PasswordReset(BaseModel): + email: str + +class TokenVerify(BaseModel): + token: str + +class UserResponse(BaseModel): + firebase_uid: str + email: str + display_name: Optional[str] = None + is_active: bool + created_at: datetime.datetime + last_login: Optional[datetime.datetime] = None + provider: str diff --git a/app/models/database/DBUser.py b/app/models/database/DBUser.py new file mode 100644 index 0000000000000000000000000000000000000000..ec9ada4584685b2f03ac30939cdc9d25ccd2eae1 --- /dev/null +++ b/app/models/database/DBUser.py @@ -0,0 +1,22 @@ +from uuid import uuid4 +from sqlalchemy import Column, DateTime, ForeignKey, JSON, delete,String, Boolean +from sqlalchemy.dialects.postgresql import UUID as PGUUID +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from app.models.database.base import Base +from datetime import datetime + + + +# Database Model +class DBUser(Base): + __tablename__ = "users" + + firebase_uid = Column(String, primary_key=True, index=True) + email 
= Column(String, unique=True, index=True) + display_name = Column(String, nullable=True) + is_active = Column(Boolean, default=True) + created_at = Column(DateTime, default=datetime.utcnow) + last_login = Column(DateTime, nullable=True) + provider = Column(String, default="email") + diff --git a/app/models/database/__init__.py b/app/models/database/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/models/database/__pycache__/DBUser.cpython-312.pyc b/app/models/database/__pycache__/DBUser.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60445fb9112f47c4199fdb1d368967cc1ca2a63d Binary files /dev/null and b/app/models/database/__pycache__/DBUser.cpython-312.pyc differ diff --git a/app/models/database/__pycache__/__init__.cpython-312.pyc b/app/models/database/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6df3ecbf3ca7ce403a9fa52725f9881a26459b9e Binary files /dev/null and b/app/models/database/__pycache__/__init__.cpython-312.pyc differ diff --git a/app/models/database/__pycache__/base.cpython-312.pyc b/app/models/database/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b078ffcbf5ec81a4c69d47dade48991215d19db2 Binary files /dev/null and b/app/models/database/__pycache__/base.cpython-312.pyc differ diff --git a/app/models/database/base.py b/app/models/database/base.py new file mode 100644 index 0000000000000000000000000000000000000000..28d6a2e83aa0929be5a54f99ae8816f60a273b56 --- /dev/null +++ b/app/models/database/base.py @@ -0,0 +1,21 @@ +""" +SQL Alchemy models declaration. +https://docs.sqlalchemy.org/en/14/orm/declarative_styles.html#example-two-dataclasses-with-declarative-table +Dataclass style for powerful autocompletion support. 
+ +https://alembic.sqlalchemy.org/en/latest/tutorial.html +Note, it is used by alembic migrations logic, see `alembic/env.py` + +Alembic shortcuts: +# create migration +alembic revision --autogenerate -m "migration_name" + +# apply all migrations +alembic upgrade head +""" + +from sqlalchemy.orm import DeclarativeBase + + +class Base(DeclarativeBase): + pass diff --git a/app/pg_isready.py b/app/pg_isready.py new file mode 100644 index 0000000000000000000000000000000000000000..e776f33fefd81e51bc00dc948d909ca77d8a88ec --- /dev/null +++ b/app/pg_isready.py @@ -0,0 +1,23 @@ + +from os import environ as env +from urllib.parse import quote_plus + +import psycopg2 + +config = { + "host": env.get("DATABASE_HOSTNAME"), + "port": env.get("DATABASE_PORT", 5432), + "user": env.get("DATABASE_USER", "revmigrate"), + "pass": quote_plus(env.get("DATABASE_PASSWORD", "revmigrate")), + "database": env.get("DATABASE_DB", "revmigrate_ai_agent_db"), +} + +dsn = "postgresql://%(user)s:%(pass)s@%(host)s:%(port)s/%(db)s" % config + +if __name__ == "__main__": + try: + db = psycopg2.connect(dsn) + except (Exception, psycopg2.DatabaseError): + exit(1) + + exit(0) \ No newline at end of file diff --git a/app/router.py b/app/router.py new file mode 100644 index 0000000000000000000000000000000000000000..99dfec1717e2b501044d203a4b0597ab9e354cd4 --- /dev/null +++ b/app/router.py @@ -0,0 +1,10 @@ +from fastapi import APIRouter +from app.api.endpoints.v1.login.api import router as login_router +from app.core.config import settings +from app.api.endpoints.v1.firebaseauth.app import router as firebase_router + + + +api_router = APIRouter(prefix=f"{settings.ROOT_PATH}/v1") +api_router.include_router(login_router) +api_router.include_router(firebase_router) \ No newline at end of file diff --git a/app/serviceAccountKey/__init__.py b/app/serviceAccountKey/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/app/serviceAccountKey/mailpoilt-firebase-adminsdk-fbsvc-26bb455f79.json b/app/serviceAccountKey/mailpoilt-firebase-adminsdk-fbsvc-26bb455f79.json new file mode 100644 index 0000000000000000000000000000000000000000..219d0ab587d289fec05b59ccb6e70c4bb0b32740 --- /dev/null +++ b/app/serviceAccountKey/mailpoilt-firebase-adminsdk-fbsvc-26bb455f79.json @@ -0,0 +1,13 @@ +{ + "type": "service_account", + "project_id": "mailpoilt", + "private_key_id": "26bb455f798a846ba180f28607f69615b3052ff7", + "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCoEQ5nJfbRbliZ\nCYbJ2+/2VXqvtFlpE/5N2LHCkpdTAZPBvYLiV5ISuwhFWwCmbkr1RjpI3y/ofJ0s\nz/aNKKdSQguo+RO1WvlPzEeWjsYzIInI898TcMYjL3tPaW5Rpc+jgLDE96ju+qv7\nc6w5hVs7PEt6eIp0AVgk6pDW5KyjQpQAGNZVBm2v17A4Sp6R59WGiBHj5RoXbJSe\nWcF44qQRvEZKiY/ukj8k6zyEWwfc7RExmAZMYlzdS8leoLQQgXhXOqRpYm3fuhSR\nmt/HWdc8L7NxkGE7yW8erDhPAh/jXv2G4dk7KX7zL+JxNYcBbE7lrBywdXMltD+3\nYbjWGRo3AgMBAAECggEADmNLatwuqZY7Qnq0zdIk1F9T/ffAEcVS54IuEZywFLyV\nVCgvrMXXbQATALD+OMFHbIEDG1r0BICqifwRvNrmnB2oVuJf5A4yOrkfxc1aPWVD\nNm9eQRFN/RQjB9TcS2QZBwPM3kffb/oIwCGSdGgsu8D5KzZnj+/PhnWIScVQ3DcC\n00ZZqxh2U3SU7LAFd5q4ULJL8CbgUdx0Du7yCWwNUDlovEllEWjuy4J8Duw12lUs\ng0DhYnrgc7o9m184h3GlDM+6ZZC4KiR85PBHbkJ+kkmfdHTAqZCpW+Os5OCPJpNM\n11wbNQUKf5ihCPi44qhWgOiWV9DDXIPEiA94PiTrqQKBgQDdEEpO0slIprG7Wkq0\nZ92rK83ppI8rrsDfb27sj96BEx1U6UwNxL19jH9Y5sYCdo0j1KrM4Mk2BkmOwKJA\nNu/UDcpXNL5LRMjyJ0VdryKarxO+LdmS+/rlWVBFlXOJiz7bSxHFeUFtGDi8PA6x\nKfKG4wLrpkhk7x+AtGR+hm9hOQKBgQDCoKIFZqCkRnRl1OYyI0cykkZp7qwrvvE0\nOAUjVEkGfKMkf3aek4HW6BlpfpaDpxM1pGiweNdqTBVPMMoZHBCanZaTGG/yvzuN\nn5j/wR0q23HjguaBTgFfPh0yGcpEtVMPJ04ZxlI5YukRGYjaNGlHdofqr0daN86f\nw4rYdIYG7wKBgQCkFqFxnNi3yrQGg2EMRESv99XiZbm4DQExWL+XbLsWSB/vlTRU\nzAJznMQyMyorTeA6BnO5m+j+5u/WvHoy/BRM7OeSj53otOgLhsbgYU1oUChxdAsa\nJVv4vbDt1Z9rKya/FBve3sCHwADtCA9HxgE1ym+yhz0ZXr2dhUlK+XLXUQKBgQCv\njYjXaky4ILzZJYJF80BDQ2G0J46xHtgei/FGfr4n1cCqKTbrro3NNSnG2GIXelp/\nZj7v/u//d4U5Gv8QSxpG/KJXKBHCWe/zvOOavav7fllqKCaWt6/RIwvepxilhRDY\neFGgOhzsm
mU0AV5JxEC6LxZF/7hSjkgYH59bwy8pjQKBgQDPjaWOYyO3ggmxbX9K\nl1pTLFKvu7MUZqpezHpq8qBO+t4SamVle5N0YFSnkM6M7syWiVmEQf+hRY7k5ZIW\nLvjdXGTBZT92S5Qn+ywHtFCoO8eenAbPxxlHAdWiMWDQuE6HR35RyyaNrquzU/cj\nM5z9ysc7KL8rSYHLKvG7QMoPVQ==\n-----END PRIVATE KEY-----\n", + "client_email": "firebase-adminsdk-fbsvc@mailpoilt.iam.gserviceaccount.com", + "client_id": "111695517349668536196", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-fbsvc%40mailpoilt.iam.gserviceaccount.com", + "universe_domain": "googleapis.com" +} diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh new file mode 100644 index 0000000000000000000000000000000000000000..1f79a4ac0fd73b4fdee715ac01cd775bdd56bcad --- /dev/null +++ b/docker-entrypoint.sh @@ -0,0 +1,62 @@ +#!/bin/bash + +set -e + +# Run any necessary startup commands here, like migrations +# Create the MailPilot user if it does not exist + + +# Start Uvicorn with app module, host, and port from environment variables +exec uvicorn app.main:fastapi_app --host 0.0.0.0 --port 5001 --reload + +DB_CHECK_INTERVAL=${DB_CHECK_INTERVAL:-5} +DB_CHECK_RETRIES=${DB_CHECK_RETRIES:-120} + +pg_isready() { + i=0 + echo -n "waiting for database connection " + while [ ${i} -le ${DB_CHECK_RETRIES} ]; do + python pg_isready.py && return || echo -n "." 
+ sleep ${DB_CHECK_INTERVAL} + let i++ + done +} + +ACTION="" +if [ $# -ge 1 ]; then + ACTION=${1} ; shift +fi + +case "${ACTION}" in + + ''|-*) + pg_isready + exec uvicorn ${UVICORN_APP} ${ACTION} ${@} + ;; + + uvicorn) + pg_isready + exec uvicorn ${UVICORN_APP} ${@} + ;; + + migration) + pg_isready + exec alembic -c app/migrations/alembic.ini upgrade head + ;; + + pytest) + pg_isready + exec pytest ${@} + ;; + + noexit) + # used locally for docker-based development + # so things don't shut down after the process ends/exits. + while sleep 1000; do :; done + ;; + + *) + exec ${ACTION} ${@} + ;; + +esac \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..a2fb24718d226c26d74babaf4eac51c1b458f8f2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,77 @@ +[tool.poetry] +authors = ["RevupAI Engineers"] +description = "MailPilot AI agents" +name = "MailPilot_ai_agents" +version = "0.1.0-alpha" + +[tool.poetry.dependencies] +fastapi = "0.112.0" +python = "^3.12" +uvicorn = {extras = ["standard"], version = "^0.22.0"} +asyncpg = "0.30.0" +environs = "^9.5.0" +msal = "^1.23.0" +aiofiles = "^23.2.1" +langchain = "0.3.7" +langgraph = "0.2.52" +langgraph-checkpoint = "2.0.5" +langgraph-checkpoint-postgres="2.0.3" +pydantic-settings = "2.6.1" +langchain-openai = "0.2.5" +langchain-community = "0.3.5" +pyyaml = "6.0.2" +alembic = "1.14.0" +sqlalchemy = "2.0.35" +greenlet = "3.1.1" +psycopg2-binary = "2.9.10" +logger = "1.4" +structlog = "24.4.0" +starlette = ">=0.37.2,<0.38.0" +pymemcache = "4.0.0" +pandas = "2.2.3" +XlsxWriter = "3.2.0" +tiktoken = "0.8.0" +python-multipart="0.0.20" +openpyxl="3.1.5" +firebase-admin="6.2.0" +email-validator = "^2.1.0" + +[tool.poetry.dev-dependencies] +autoflake = "^2.1.0" +black = "^23.9.1" +coverage = "^7.3.1" +flake8 = "^6.1.0" +isort = "^5.12.0" +pytest = "^8.3.3" +pytest-asyncio = "^0.21.1" +pre-commit = "^3.5.0" +pytest-mock = "^3.11.1" +factory-boy = 
"^3.3.0" + +[build-system] +build-backend = "poetry.core.masonry.api" +requires = ["poetry-core>=1.8.0"] + +[tool.pytest.ini_options] +addopts = "-v" +asyncio_mode = "auto" +filterwarnings = [] +markers = ["pytest.mark.asyncio"] +minversion = "6.0" +testpaths = ["app/tests"] + +[tool.isort] +profile = "black" + +[tool.flake8] +max-line-length = 120 +select = "C, E, F, W, B, B9" +ignore = "E203, E501, W503" +exclude = """ + __init__.py, + .venv, + venv, + __pycache__, + .github, + .vscode, +""" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..057cc244c72deb655a8c7b435a01a336616d0a02 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,94 @@ +aiofiles==23.2.1 ; python_version >= "3.12" and python_version < "4.0" +aiohappyeyeballs==2.4.4 ; python_version >= "3.12" and python_version < "4.0" +aiohttp==3.11.10 ; python_version >= "3.12" and python_version < "4.0" +aiosignal==1.3.1 ; python_version >= "3.12" and python_version < "4.0" +alembic==1.14.0 ; python_version >= "3.12" and python_version < "4.0" +annotated-types==0.7.0 ; python_version >= "3.12" and python_version < "4.0" +anyio==4.7.0 ; python_version >= "3.12" and python_version < "4.0" +asyncpg==0.30.0 ; python_version >= "3.12" and python_version < "4.0" +attrs==24.2.0 ; python_version >= "3.12" and python_version < "4.0" +certifi==2024.8.30 ; python_version >= "3.12" and python_version < "4.0" +cffi==1.17.1 ; python_version >= "3.12" and python_version < "4.0" and platform_python_implementation != "PyPy" +charset-normalizer==3.4.0 ; python_version >= "3.12" and python_version < "4.0" +click==8.1.7 ; python_version >= "3.12" and python_version < "4.0" +colorama==0.4.6 ; python_version >= "3.12" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") +cryptography==44.0.0 ; python_version >= "3.12" and python_version < "4.0" +dataclasses-json==0.6.7 ; python_version >= "3.12" and python_version < "4.0" 
+distro==1.9.0 ; python_version >= "3.12" and python_version < "4.0" +environs==9.5.0 ; python_version >= "3.12" and python_version < "4.0" +fastapi==0.112.0 ; python_version >= "3.12" and python_version < "4.0" +frozenlist==1.5.0 ; python_version >= "3.12" and python_version < "4.0" +greenlet==3.1.1 ; python_version >= "3.12" and python_version < "4.0" +h11==0.14.0 ; python_version >= "3.12" and python_version < "4.0" +httpcore==1.0.7 ; python_version >= "3.12" and python_version < "4.0" +httptools==0.6.4 ; python_version >= "3.12" and python_version < "4.0" +httpx-sse==0.4.0 ; python_version >= "3.12" and python_version < "4.0" +httpx==0.28.1 ; python_version >= "3.12" and python_version < "4.0" +idna==3.10 ; python_version >= "3.12" and python_version < "4.0" +jiter==0.8.2 ; python_version >= "3.12" and python_version < "4.0" +jsonpatch==1.33 ; python_version >= "3.12" and python_version < "4.0" +jsonpointer==3.0.0 ; python_version >= "3.12" and python_version < "4.0" +langchain-community==0.3.5 ; python_version >= "3.12" and python_version < "4.0" +langchain-core==0.3.22 ; python_version >= "3.12" and python_version < "4.0" +langchain-openai==0.2.5 ; python_version >= "3.12" and python_version < "4.0" +langchain-text-splitters==0.3.2 ; python_version >= "3.12" and python_version < "4.0" +langchain==0.3.7 ; python_version >= "3.12" and python_version < "4.0" +langgraph-checkpoint-postgres==2.0.3 ; python_version >= "3.12" and python_version < "4.0" +langgraph-checkpoint==2.0.5 ; python_version >= "3.12" and python_version < "4.0" +langgraph-sdk==0.1.43 ; python_version >= "3.12" and python_version < "4.0" +langgraph==0.2.52 ; python_version >= "3.12" and python_version < "4.0" +langsmith==0.1.147 ; python_version >= "3.12" and python_version < "4.0" +logger==1.4 ; python_version >= "3.12" and python_version < "4.0" +mako==1.3.8 ; python_version >= "3.12" and python_version < "4.0" +markupsafe==3.0.2 ; python_version >= "3.12" and python_version < "4.0" 
+marshmallow==3.23.1 ; python_version >= "3.12" and python_version < "4.0" +msal==1.31.1 ; python_version >= "3.12" and python_version < "4.0" +msgpack==1.1.0 ; python_version >= "3.12" and python_version < "4.0" +multidict==6.1.0 ; python_version >= "3.12" and python_version < "4.0" +mypy-extensions==1.0.0 ; python_version >= "3.12" and python_version < "4.0" +numpy==1.26.4 ; python_version >= "3.12" and python_version < "4.0" +openai==1.57.1 ; python_version >= "3.12" and python_version < "4.0" +orjson==3.10.12 ; python_version >= "3.12" and python_version < "4.0" +packaging==24.2 ; python_version >= "3.12" and python_version < "4.0" +pandas==2.2.3 ; python_version >= "3.12" and python_version < "4.0" +propcache==0.2.1 ; python_version >= "3.12" and python_version < "4.0" +psycopg-pool==3.2.4 ; python_version >= "3.12" and python_version < "4.0" +psycopg2-binary==2.9.10 ; python_version >= "3.12" and python_version < "4.0" +psycopg==3.2.3 ; python_version >= "3.12" and python_version < "4.0" +pycparser==2.22 ; python_version >= "3.12" and python_version < "4.0" and platform_python_implementation != "PyPy" +pydantic-core==2.27.1 ; python_version >= "3.12" and python_version < "4.0" +pydantic-settings==2.6.1 ; python_version >= "3.12" and python_version < "4.0" +pydantic==2.10.3 ; python_version >= "3.12" and python_version < "4.0" +pyjwt[crypto]==2.10.1 ; python_version >= "3.12" and python_version < "4.0" +pymemcache==4.0.0 ; python_version >= "3.12" and python_version < "4.0" +python-dateutil==2.9.0.post0 ; python_version >= "3.12" and python_version < "4.0" +python-dotenv==1.0.1 ; python_version >= "3.12" and python_version < "4.0" +pytz==2024.2 ; python_version >= "3.12" and python_version < "4.0" +pyyaml==6.0.2 ; python_version >= "3.12" and python_version < "4.0" +regex==2024.11.6 ; python_version >= "3.12" and python_version < "4.0" +requests-toolbelt==1.0.0 ; python_version >= "3.12" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.12" 
and python_version < "4.0" +six==1.17.0 ; python_version >= "3.12" and python_version < "4.0" +sniffio==1.3.1 ; python_version >= "3.12" and python_version < "4.0" +sqlalchemy==2.0.35 ; python_version >= "3.12" and python_version < "4.0" +starlette==0.37.2 ; python_version >= "3.12" and python_version < "4.0" +structlog==24.4.0 ; python_version >= "3.12" and python_version < "4.0" +tenacity==9.0.0 ; python_version >= "3.12" and python_version < "4.0" +tiktoken==0.8.0 ; python_version >= "3.12" and python_version < "4.0" +tqdm==4.67.1 ; python_version >= "3.12" and python_version < "4.0" +typing-extensions==4.12.2 ; python_version >= "3.12" and python_version < "4.0" +typing-inspect==0.9.0 ; python_version >= "3.12" and python_version < "4.0" +tzdata==2024.2 ; python_version >= "3.12" and python_version < "4.0" +urllib3==2.2.3 ; python_version >= "3.12" and python_version < "4.0" +uvicorn[standard]==0.22.0 ; python_version >= "3.12" and python_version < "4.0" +uvloop==0.21.0 ; (sys_platform != "win32" and sys_platform != "cygwin") and platform_python_implementation != "PyPy" and python_version >= "3.12" and python_version < "4.0" +watchfiles==1.0.0 ; python_version >= "3.12" and python_version < "4.0" +websockets==14.1 ; python_version >= "3.12" and python_version < "4.0" +xlsxwriter==3.2.0 ; python_version >= "3.12" and python_version < "4.0" +yarl==1.18.3 ; python_version >= "3.12" and python_version < "4.0" +python-multipart==0.0.20 ; python_version >= "3.12" and python_version < "4.0" +openpyxl==3.1.5 ; python_version >= "3.12" and python_version < "4.0" +boto3>=1.28.0 ; python_version >= "3.12" and python_version < "4.0" +langchain-aws>=0.1.0 ; python_version >= "3.12" and python_version < "4.0" +firebase-admin==6.2.0 ; python_version >= "3.12" and python_version < "4.0" +email-validator==2.1.0