removed pool errors
- App/routers/stocks/routes.py +56 -8
- App/routers/users/models.py +1 -1
- App/routers/users/routes.py +7 -2
- db.py +9 -9
- get-pip.py +0 -0
- main.py +7 -7
App/routers/stocks/routes.py
CHANGED

@@ -1,4 +1,4 @@
-from fastapi import APIRouter, BackgroundTasks
+from fastapi import APIRouter, BackgroundTasks, Query
 from .schemas import DividendResponse, StockResponse, PriceDataResponse
 from .crud import (
     create_or_get_stock,
@@ -12,9 +12,9 @@ from .metrics import calculate_metrics
 from .models import Stock, StockPriceData
 from App.routers.tasks.models import ImportTask
 from App.schemas import ResponseModel
-from typing import Dict, List
+from typing import Dict, List, Optional
 import datetime
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
 from .models import Dividend
 from .utils import AsyncCurlCffiDividendScraper, run_stock_import_task
 from App.schemas import AppException
@@ -96,18 +96,66 @@ async def list_stocks_orm():
 
 
 @router.get("/{symbol}/prices", response_model=ResponseModel)
-async def get_stock_prices(
+async def get_stock_prices(
+    symbol: str,
+    time_range: Optional[str] = Query(
+        "max", enum=["1w", "1m", "1y", "ytd", "max"]
+    ),
+    page: int = Query(1, ge=1),
+    page_size: int = Query(100, ge=1, le=1000),
+):
     stock = await Stock.get_or_none(symbol=symbol.upper())
     if not stock:
         raise AppException(status_code=404, detail="Stock not found")
 
-
-
-
+    prices_queryset = StockPriceData.filter(stock_id=stock.id).order_by("-date")
+
+    # Time range filtering
+    if time_range != "max":
+        today = date.today()
+        if time_range == "1w":
+            start_date = today - timedelta(weeks=1)
+        elif time_range == "1m":
+            start_date = today - timedelta(days=30)
+        elif time_range == "1y":
+            start_date = today - timedelta(days=365)
+        elif time_range == "ytd":
+            start_date = date(today.year, 1, 1)
+        prices_queryset = prices_queryset.filter(date__gte=start_date)
+
+    # Pagination
+    total_count = await prices_queryset.count()
+    paginated_queryset = prices_queryset.offset((page - 1) * page_size).limit(
+        page_size
+    )
+
+    # Select only the required fields
+    prices = await paginated_queryset.values(
+        "id",
+        "date",
+        "opening_price",
+        "closing_price",
+        "high",
+        "low",
+        "volume",
+        "turnover",
+        "shares_in_issue",
+        "market_cap",
+        "created_at",
+    )
+
     return ResponseModel(
         success=True,
         message="Stock prices retrieved",
-        data={
+        data={
+            "prices": prices,
+            "pagination": {
+                "total_count": total_count,
+                "total_pages": (total_count + page_size - 1) // page_size,
+                "current_page": page,
+                "page_size": page_size,
+            },
+        },
     )
 
 
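For reference, a minimal sketch of how the reworked endpoint might be called once the server is running. The "/stocks" prefix, the "NVDA" symbol, and the local port are illustrative assumptions, not part of this diff:

import httpx  # assumed to be available in the client environment

# Request the last year of prices, second page, 50 rows per page.
resp = httpx.get(
    "http://localhost:8000/stocks/NVDA/prices",
    params={"time_range": "1y", "page": 2, "page_size": 50},
)
data = resp.json()["data"]
# "pagination" carries total_count, total_pages, current_page and page_size,
# matching the dict built in the handler above.
print(len(data["prices"]), data["pagination"])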
App/routers/users/models.py
CHANGED

@@ -39,4 +39,4 @@ class Watchlist(models.Model):
     async def to_dict(self):
         if type(self) == models.Model:
             parser=pydantic_model_creator(Watchlist)
-            return await parser.from_tortoise_orm(self)
+            return await parser.from_tortoise_orm(self)
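For context on the method touched here: pydantic_model_creator builds a Pydantic model class from a Tortoise model, and from_tortoise_orm serializes an instance of it. A minimal sketch of that pattern, independent of this diff (the creator class is normally built once at import time rather than on every call):

from tortoise.contrib.pydantic import pydantic_model_creator

# Build the serializer class once; Watchlist is the Tortoise model defined in this file.
WatchlistPydantic = pydantic_model_creator(Watchlist)

async def serialize_watchlist(watchlist: Watchlist) -> dict:
    # from_tortoise_orm returns a Pydantic model; model_dump() turns it into a plain dict.
    serialized = await WatchlistPydantic.from_tortoise_orm(watchlist)
    return serialized.model_dump()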
App/routers/users/routes.py
CHANGED

@@ -22,9 +22,14 @@ async def login(user: UserLogin):
 
     if not bcrypt.verify(user.password, user_obj.hashed_password):
         raise AppException(status_code=400, detail=ResponseModel(success=False, message="Invalid email or password"))
+
+    # Use the modified to_dict method
     _user = await user_obj.to_dict()
-
-
+
+    # The _user object is now a Pydantic model, so we can pass it to UserResponse
+    _user_response = UserResponse.model_validate(_user.model_dump())
+
+    return ResponseModel(success=True, message="Login successful", data=_user_response)
 
 @router.post("/register", response_model=ResponseModel)
 async def register(user: UserCreate):
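The new return path converts the Tortoise user to a Pydantic model via to_dict(), flattens it with model_dump(), and re-validates it as UserResponse. A minimal sketch of that round trip with stand-in Pydantic v2 models (the real UserResponse fields are not shown in this diff):

from pydantic import BaseModel

class SourceUser(BaseModel):      # stand-in for the model produced by to_dict()
    id: int
    email: str
    hashed_password: str

class UserResponse(BaseModel):    # stand-in; the real fields live in the users schemas
    id: int
    email: str

src = SourceUser(id=1, email="user@example.com", hashed_password="x")
# model_dump() flattens to a dict; model_validate() re-parses it as UserResponse.
# Extra keys such as hashed_password are ignored by default, so they never reach the client.
user_response = UserResponse.model_validate(src.model_dump())
print(user_response)  # id=1 email='user@example.com'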
db.py
CHANGED

@@ -4,13 +4,6 @@ import ssl
 import uuid
 from asyncpg import Connection
 
-# 1. Create a custom connection class to generate unique IDs for every statement
-class UniquePreparedStatementConnection(Connection):
-    def _get_unique_id(self, prefix: str) -> str:
-        # This forces every prepared statement to have a new, unique name,
-        # preventing any possible collisions when using a connection pooler.
-        return f"__asyncpg_{prefix}_{uuid.uuid4()}__"
-
 # It's a good practice to create the SSL context outside the dict
 ssl_context = ssl.create_default_context()
 
@@ -21,15 +14,22 @@ TORTOISE_ORM = {
        "engine": "tortoise.backends.asyncpg",
        "credentials": {
            "host": "aws-0-us-east-2.pooler.supabase.com",
-           "port": "
+           "port": "5432",
            "user": os.getenv("DB_USER"),
            "password": os.getenv("DB_PASSWORD"),
            "database": "postgres",
+           "min_size": 1, # Start with a small pool, e.g., 1-5 connections
+           "max_size": 10, # Adjust based on expected load and Supabase limits. Common values: 10-50
+           "timeout": 30, # Connection timeout in seconds [16]
+           # "ssl": True, # Enable SSL if required by Supabase for production
+           # "statement_cache_size": 0, # Optional: Keep for completeness if other issues arise, but primary fix is connection mode
+           "max_queries": 50000, # Max queries before a connection is closed and replaced [15]
+           "max_inactive_connection_lifetime": 300.0, # Max idle time before a connection is closed [15]
        },
+
        # Pass the custom connection class and disable the cache
        "connect_args": {
            "statement_cache_size": 0,
-           "connection_class": UniquePreparedStatementConnection,
            "ssl": ssl_context
        }
    }
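The extra credential keys (min_size, max_size, max_queries, max_inactive_connection_lifetime) appear to be forwarded by Tortoise's asyncpg backend to the underlying connection pool. A minimal sketch of wiring this config into the FastAPI app via the standard register_tortoise helper; the "db" import path and the use of register_tortoise here are assumptions, not shown in this commit:

from fastapi import FastAPI
from tortoise.contrib.fastapi import register_tortoise

from db import TORTOISE_ORM  # assumed import path for the config above

app = FastAPI()

# Opens the asyncpg pool on startup (respecting the min_size/max_size limits
# set in the credentials) and closes it cleanly on shutdown.
register_tortoise(
    app,
    config=TORTOISE_ORM,
    generate_schemas=False,
    add_exception_handlers=True,
)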
get-pip.py
ADDED

The diff for this file is too large to render; see the raw diff.
main.py
CHANGED

@@ -36,13 +36,13 @@ async def validation_exception_handler(request: Request, exc: RequestValidationE
 
 
 # # Configure CORS
-
-
-
-
-
-
-
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=False,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
 
 # Include routers
 app.include_router(stocks_router)
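The new block relies on CORSMiddleware being imported elsewhere in main.py; that import is not part of this hunk. A minimal sketch of the full wiring, assuming the standard FastAPI import path:

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

# Allow any origin without credentials. Browsers reject the combination of
# allow_origins=["*"] with credentials enabled, so allow_credentials stays False here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=False,
    allow_methods=["*"],
    allow_headers=["*"],
)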