version 1.1
- .env.example +8 -1
- .gitattributes +0 -29
- .gitignore +4 -0
- README.md +59 -1
- api_documentation.txt +0 -318
- app.py +48 -3
- app/__init__.py +3 -1
- app/api/mongodb_routes.py +2 -2
- app/api/pdf_routes.py +233 -0
- app/api/pdf_websocket.py +263 -0
- app/api/postgresql_routes.py +0 -0
- app/api/rag_routes.py +78 -278
- app/database/models.py +27 -0
- app/database/mongodb.py +49 -60
- app/database/pinecone.py +40 -48
- app/database/postgresql.py +119 -30
- app/models/pdf_models.py +51 -0
- app/utils/cache.py +271 -0
- app/utils/pdf_processor.py +211 -0
- app/utils/utils.py +380 -28
- docs/api_documentation.md +581 -0
- requirements.txt +5 -1
.env.example CHANGED
@@ -23,4 +23,11 @@ WEBSOCKET_PATH=/notify
 # Application settings
 ENVIRONMENT=production
 DEBUG=false
-PORT=7860
+PORT=7860
+
+# Cache Configuration
+CACHE_TTL_SECONDS=300
+CACHE_CLEANUP_INTERVAL=60
+CACHE_MAX_SIZE=1000
+HISTORY_QUEUE_SIZE=10
+HISTORY_CACHE_TTL=3600
.gitattributes DELETED
@@ -1,29 +0,0 @@
-# Auto detect text files and perform LF normalization
-* text=auto eol=lf
-
-# Documents
-*.md text
-*.txt text
-*.ini text
-*.yaml text
-*.yml text
-*.json text
-*.py text
-*.env.example text
-
-# Binary files
-*.png binary
-*.jpg binary
-*.jpeg binary
-*.gif binary
-*.ico binary
-*.db binary
-
-# Git related files
-.gitignore text
-.gitattributes text
-
-# Docker related files
-Dockerfile text
-docker-compose.yml text
-.dockerignore text
.gitignore CHANGED
@@ -60,6 +60,8 @@ tests/
 
 Admin_bot/
 
+Pix-Agent/
+
 # Hugging Face Spaces
 .gitattributes
 
@@ -77,3 +79,5 @@ Thumbs.db
 *.log
 .env
 main.py
+
+test/
README.md CHANGED
@@ -358,4 +358,62 @@ You can customize the retrieval parameters when making API requests:
 
 ## Implementation Details
 
-The system is implemented as a custom retriever class `ThresholdRetriever` that integrates with LangChain's retrieval infrastructure while providing enhanced functionality.
+The system is implemented as a custom retriever class `ThresholdRetriever` that integrates with LangChain's retrieval infrastructure while providing enhanced functionality.
+
+## In-Memory Cache
+
+Dự án bao gồm một hệ thống cache trong bộ nhớ để giảm thiểu truy cập đến cơ sở dữ liệu PostgreSQL và MongoDB.
+
+### Cấu hình Cache
+
+Cache được cấu hình thông qua các biến môi trường:
+
+```
+# Cache Configuration
+CACHE_TTL_SECONDS=300 # Thời gian tồn tại của cache item (giây)
+CACHE_CLEANUP_INTERVAL=60 # Chu kỳ xóa cache hết hạn (giây)
+CACHE_MAX_SIZE=1000 # Số lượng item tối đa trong cache
+HISTORY_QUEUE_SIZE=10 # Số lượng item tối đa trong queue lịch sử người dùng
+HISTORY_CACHE_TTL=3600 # Thời gian tồn tại của lịch sử người dùng (giây)
+```
+
+### Cơ chế Cache
+
+Hệ thống cache kết hợp hai cơ chế hết hạn:
+
+1. **Lazy Expiration**: Kiểm tra thời hạn khi truy cập cache item. Nếu item đã hết hạn, nó sẽ bị xóa và trả về kết quả là không tìm thấy.
+
+2. **Active Expiration**: Một background thread định kỳ quét và xóa các item đã hết hạn. Điều này giúp tránh tình trạng cache quá lớn với các item không còn được sử dụng.
+
+### Các loại dữ liệu được cache
+
+- **Dữ liệu PostgreSQL**: Thông tin từ các bảng FAQ, Emergency Contacts, và Events.
+- **Lịch sử người dùng từ MongoDB**: Lịch sử hội thoại người dùng được lưu trong queue với thời gian sống tính theo lần truy cập cuối cùng.
+
+### API Cache
+
+Dự án cung cấp các API endpoints để quản lý cache:
+
+- `GET /cache/stats`: Xem thống kê về cache (tổng số item, bộ nhớ sử dụng, v.v.)
+- `DELETE /cache/clear`: Xóa toàn bộ cache
+- `GET /debug/cache`: (Chỉ trong chế độ debug) Xem thông tin chi tiết về cache, bao gồm các keys và cấu hình
+
+### Cách hoạt động
+
+1. Khi một request đến, hệ thống sẽ kiểm tra dữ liệu trong cache trước.
+2. Nếu dữ liệu tồn tại và còn hạn, trả về từ cache.
+3. Nếu dữ liệu không tồn tại hoặc đã hết hạn, truy vấn từ database và lưu kết quả vào cache.
+4. Khi dữ liệu được cập nhật hoặc xóa, cache liên quan sẽ tự động được xóa.
+
+### Lịch sử người dùng
+
+Lịch sử hội thoại người dùng được lưu trong queue riêng với cơ chế đặc biệt:
+
+- Mỗi người dùng có một queue riêng với kích thước giới hạn (`HISTORY_QUEUE_SIZE`).
+- Thời gian sống của queue được làm mới mỗi khi có tương tác mới.
+- Khi queue đầy, các item cũ nhất sẽ bị loại bỏ.
+- Queue tự động bị xóa sau một thời gian không hoạt động.
+
+## Tác giả
+
+- **PIX Project Team**
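For readers of the new README section above, here is a minimal sketch of the two expiration mechanisms it describes (lazy checks on read plus a background cleanup thread). The class and method names are illustrative only; the actual implementation added by this commit is `app/utils/cache.py`.

```python
# Minimal sketch of the cache behaviour described above (lazy + active expiration).
# Hypothetical names; the real code lives in app/utils/cache.py.
import threading
import time


class SimpleTTLCache:
    def __init__(self, ttl=300, cleanup_interval=60, max_size=1000):
        self.ttl = ttl
        self.cleanup_interval = cleanup_interval
        self.max_size = max_size
        self._store = {}          # key -> (value, expires_at)
        self._lock = threading.Lock()
        # Active expiration: a daemon thread periodically removes expired items.
        threading.Thread(target=self._cleanup_loop, daemon=True).start()

    def get(self, key):
        # Lazy expiration: check the deadline on every read.
        with self._lock:
            item = self._store.get(key)
            if item is None:
                return None
            value, expires_at = item
            if expires_at < time.time():
                del self._store[key]
                return None
            return value

    def set(self, key, value):
        with self._lock:
            if len(self._store) >= self.max_size:
                # Evict the entry closest to expiry to stay under max_size.
                oldest = min(self._store, key=lambda k: self._store[k][1])
                del self._store[oldest]
            self._store[key] = (value, time.time() + self.ttl)

    def _cleanup_loop(self):
        while True:
            time.sleep(self.cleanup_interval)
            now = time.time()
            with self._lock:
                for k in [k for k, (_, exp) in self._store.items() if exp < now]:
                    del self._store[k]
```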
api_documentation.txt DELETED
@@ -1,318 +0,0 @@
-# Frontend Integration Guide for PixAgent API
-
-This guide provides instructions for integrating with the optimized PostgreSQL-based API endpoints for Event, FAQ, and Emergency data.
-
-## API Endpoints
-
-### Events
-
-| Endpoint | Method | Description |
-|----------|--------|-------------|
-| /postgres/events/ | GET | Fetch all events (with optional filtering) |
-| /postgres/events/{event_id} | GET | Fetch a specific event by ID |
-| /postgres/events/featured | GET | Fetch featured events |
-| /postgres/events/ | POST | Create a new event |
-| /postgres/events/{event_id} | PUT | Update an existing event |
-| /postgres/events/{event_id} | DELETE | Delete an event |
-
-### FAQs
-
-| Endpoint | Method | Description |
-|----------|--------|-------------|
-| /postgres/faqs/ | GET | Fetch all FAQs |
-| /postgres/faqs/{faq_id} | GET | Fetch a specific FAQ by ID |
-| /postgres/faqs/ | POST | Create a new FAQ |
-| /postgres/faqs/{faq_id} | PUT | Update an existing FAQ |
-| /postgres/faqs/{faq_id} | DELETE | Delete a FAQ |
-
-### Emergency Contacts
-
-| Endpoint | Method | Description |
-|----------|--------|-------------|
-| /postgres/emergencies/ | GET | Fetch all emergency contacts |
-| /postgres/emergencies/{emergency_id} | GET | Fetch a specific emergency contact by ID |
-| /postgres/emergencies/ | POST | Create a new emergency contact |
-| /postgres/emergencies/{emergency_id} | PUT | Update an existing emergency contact |
-| /postgres/emergencies/{emergency_id} | DELETE | Delete an emergency contact |
-
-## Response Models
-
-### Event Response Model
-
-interface EventResponse {
-  id: number;
-  name: string;
-  description: string;
-  date_start: string; // ISO format date
-  date_end: string; // ISO format date
-  location: string;
-  image_url: string;
-  price: {
-    currency: string;
-    amount: string;
-  };
-  featured: boolean;
-  is_active: boolean;
-  created_at: string; // ISO format date
-  updated_at: string; // ISO format date
-}
-
-### FAQ Response Model
-
-interface FaqResponse {
-  id: number;
-  question: string;
-  answer: string;
-  is_active: boolean;
-  created_at: string; // ISO format date
-  updated_at: string; // ISO format date
-}
-
-### Emergency Response Model
-
-interface EmergencyResponse {
-  id: number;
-  name: string;
-  phone_number: string;
-  description: string;
-  address: string;
-  priority: number;
-  is_active: boolean;
-  created_at: string; // ISO format date
-  updated_at: string; // ISO format date
-}
-
-## Example Usage (React)
-
-### Fetching Events
-
-import { useState, useEffect } from 'react';
-import axios from 'axios';
-
-const API_BASE_URL = 'http://localhost:8000';
-
-function EventList() {
-  const [events, setEvents] = useState([]);
-  const [loading, setLoading] = useState(true);
-  const [error, setError] = useState(null);
-
-  useEffect(() => {
-    const fetchEvents = async () => {
-      try {
-        setLoading(true);
-        const response = await axios.get(`${API_BASE_URL}/postgres/events/`);
-        setEvents(response.data);
-        setLoading(false);
-      } catch (err) {
-        setError('Failed to fetch events');
-        setLoading(false);
-        console.error('Error fetching events:', err);
-      }
-    };
-
-    fetchEvents();
-  }, []);
-
-  if (loading) return <p>Loading events...</p>;
-  if (error) return <p>{error}</p>;
-
-  return (
-    <div>
-      <h1>Events</h1>
-      <div className="event-list">
-        {events.map(event => (
-          <div key={event.id} className="event-card">
-            <h2>{event.name}</h2>
-            <p>{event.description}</p>
-            <p>
-              <strong>When:</strong> {new Date(event.date_start).toLocaleDateString()} - {new Date(event.date_end).toLocaleDateString()}
-            </p>
-            <p><strong>Where:</strong> {event.location}</p>
-            <p><strong>Price:</strong> {event.price.amount} {event.price.currency}</p>
-            {event.featured && <span className="featured-badge">Featured</span>}
-          </div>
-        ))}
-      </div>
-    </div>
-  );
-}
-
-### Creating an Event
-
-import { useState } from 'react';
-import axios from 'axios';
-
-function CreateEvent() {
-  const [eventData, setEventData] = useState({
-    name: '',
-    description: '',
-    date_start: '',
-    date_end: '',
-    location: '',
-    image_url: '',
-    price: {
-      currency: 'USD',
-      amount: '0'
-    },
-    featured: false,
-    is_active: true
-  });
-  const [loading, setLoading] = useState(false);
-  const [error, setError] = useState(null);
-  const [success, setSuccess] = useState(false);
-
-  const handleChange = (e) => {
-    const { name, value, type, checked } = e.target;
-
-    if (name === 'price_amount') {
-      setEventData(prev => ({
-        ...prev,
-        price: {
-          ...prev.price,
-          amount: value
-        }
-      }));
-    } else if (name === 'price_currency') {
-      setEventData(prev => ({
-        ...prev,
-        price: {
-          ...prev.price,
-          currency: value
-        }
-      }));
-    } else {
-      setEventData(prev => ({
-        ...prev,
-        [name]: type === 'checkbox' ? checked : value
-      }));
-    }
-  };
-
-  const handleSubmit = async (e) => {
-    e.preventDefault();
-    try {
-      setLoading(true);
-      setError(null);
-      setSuccess(false);
-
-      const response = await axios.post(`${API_BASE_URL}/postgres/events/`, eventData);
-      setSuccess(true);
-      setEventData({
-        name: '',
-        description: '',
-        date_start: '',
-        date_end: '',
-        location: '',
-        image_url: '',
-        price: {
-          currency: 'USD',
-          amount: '0'
-        },
-        featured: false,
-        is_active: true
-      });
-      setLoading(false);
-    } catch (err) {
-      setError('Failed to create event');
-      setLoading(false);
-      console.error('Error creating event:', err);
-    }
-  };
-
-  return (
-    <div>
-      <h1>Create New Event</h1>
-      {success && <div className="success-message">Event created successfully!</div>}
-      {error && <div className="error-message">{error}</div>}
-      <form onSubmit={handleSubmit}>
-        {/* Form fields would go here */}
-        <button type="submit" disabled={loading}>
-          {loading ? 'Creating...' : 'Create Event'}
-        </button>
-      </form>
-    </div>
-  );
-}
-
-## Performance Optimizations
-
-The API now includes several performance optimizations:
-
-### Caching
-
-The server implements caching for read operations, which significantly improves response times for repeated requests. The average cache improvement is over 70%.
-
-Frontend considerations:
-No need to implement client-side caching for data that doesn't change frequently
-For real-time data, consider adding a refresh button in the UI
-If data might be updated by other users, consider adding a polling mechanism or websocket for updates
-
-
-### Error Handling
-
-The API returns standardized error responses. Example:
-
-async function fetchData(url) {
-  try {
-    const response = await fetch(url);
-    if (!response.ok) {
-      const errorData = await response.json();
-      throw new Error(errorData.detail || 'An error occurred');
-    }
-    return await response.json();
-  } catch (error) {
-    console.error('API request failed:', error);
-    // Handle error in UI
-    return null;
-  }
-}
-
-### Price Field Handling
-
-The price field of events is a JSON object with currency and amount properties. When creating or updating events, ensure this is properly formatted:
-
-// Correct format for price field
-const eventData = {
-  // other fields...
-  price: {
-    currency: 'USD',
-    amount: '10.99'
-  }
-};
-
-// When displaying price
-function formatPrice(price) {
-  if (!price) return 'Free';
-  if (typeof price === 'string') {
-    try {
-      price = JSON.parse(price);
-    } catch {
-      return price;
-    }
-  }
-  return `${price.amount} ${price.currency}`;
-}
-
-## CORS Configuration
-
-The API has CORS enabled for frontend applications. If you're experiencing CORS issues, ensure your frontend domain is allowed in the server configuration.
-
-For local development, the following origins are typically allowed:
-- http://localhost:3000
-- http://localhost:5000
-- http://localhost:8080
-
-## Status Codes
-
-| Status Code | Description |
-|-------------|-------------|
-| 200 | Success - The request was successful |
-| 201 | Created - A new resource was successfully created |
-| 400 | Bad Request - The request could not be understood or was missing required parameters |
-| 404 | Not Found - Resource not found |
-| 422 | Validation Error - Request data failed validation |
-| 500 | Internal Server Error - An error occurred on the server |
-
-## Questions?
-
-For further inquiries about the API, please contact the development team.
app.py CHANGED
@@ -64,11 +64,9 @@ async def lifespan(app: FastAPI):
     # Startup: kiểm tra kết nối các database
     logger.info("Starting application...")
     db_status = check_database_connections()
-    if all(db_status.values()):
-        logger.info("All database connections are working")
 
     # Khởi tạo bảng trong cơ sở dữ liệu (nếu chưa tồn tại)
-    if DEBUG:  # Chỉ khởi tạo bảng trong chế độ debug
+    if DEBUG and all(db_status.values()):  # Chỉ khởi tạo bảng trong chế độ debug và khi tất cả kết nối DB thành công
         from app.database.postgresql import create_tables
         if create_tables():
             logger.info("Database tables created or already exist")
@@ -84,6 +82,7 @@ try:
     from app.api.postgresql_routes import router as postgresql_router
     from app.api.rag_routes import router as rag_router
     from app.api.websocket_routes import router as websocket_router
+    from app.api.pdf_routes import router as pdf_router
 
     # Import middlewares
     from app.utils.middleware import RequestLoggingMiddleware, ErrorHandlingMiddleware, DatabaseCheckMiddleware
@@ -91,6 +90,9 @@ try:
     # Import debug utilities
     from app.utils.debug_utils import debug_view, DebugInfo, error_tracker, performance_monitor
 
+    # Import cache
+    from app.utils.cache import get_cache
+
 except ImportError as e:
     logger.error(f"Error importing routes or middlewares: {e}")
     raise
@@ -126,6 +128,7 @@ app.include_router(mongodb_router)
 app.include_router(postgresql_router)
 app.include_router(rag_router)
 app.include_router(websocket_router)
+app.include_router(pdf_router)
 
 # Root endpoint
 @app.get("/")
@@ -149,6 +152,25 @@ def health_check():
         "databases": db_status
     }
 
+@app.get("/api/ping")
+async def ping():
+    return {"status": "pong"}
+
+# Cache stats endpoint
+@app.get("/cache/stats")
+def cache_stats():
+    """Trả về thống kê về cache"""
+    cache = get_cache()
+    return cache.stats()
+
+# Cache clear endpoint
+@app.delete("/cache/clear")
+def cache_clear():
+    """Xóa tất cả dữ liệu trong cache"""
+    cache = get_cache()
+    cache.clear()
+    return {"message": "Cache cleared successfully"}
+
 # Debug endpoints (chỉ có trong chế độ debug)
 if DEBUG:
     @app.get("/debug/config")
@@ -190,6 +212,29 @@ if DEBUG:
     def debug_full_report(request: Request):
         """Hiển thị báo cáo debug đầy đủ (chỉ trong chế độ debug)"""
         return debug_view(request)
+
+    @app.get("/debug/cache")
+    def debug_cache():
+        """Hiển thị thông tin chi tiết về cache (chỉ trong chế độ debug)"""
+        cache = get_cache()
+        cache_stats = cache.stats()
+
+        # Thêm thông tin chi tiết về các key trong cache
+        cache_keys = list(cache.cache.keys())
+        history_users = list(cache.user_history_queues.keys())
+
+        return {
+            "stats": cache_stats,
+            "keys": cache_keys,
+            "history_users": history_users,
+            "config": {
+                "ttl": cache.ttl,
+                "cleanup_interval": cache.cleanup_interval,
+                "max_size": cache.max_size,
+                "history_queue_size": os.getenv("HISTORY_QUEUE_SIZE", "10"),
+                "history_cache_ttl": os.getenv("HISTORY_CACHE_TTL", "3600"),
+            }
+        }
 
 # Run the app with uvicorn when executed directly
 if __name__ == "__main__":
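The cache management and ping endpoints registered above can be exercised roughly as follows; the base URL assumes the default PORT=7860 from .env.example, and the snippet is illustrative rather than part of the commit.

```python
# Hypothetical usage of the new endpoints added to app.py.
import requests

BASE_URL = "http://localhost:7860"

# Liveness probe added in this commit
print(requests.get(f"{BASE_URL}/api/ping").json())

# Read cache statistics (item count, memory usage, ...)
print(requests.get(f"{BASE_URL}/cache/stats").json())

# Drop every cached item, e.g. after bulk-updating FAQ/Event data
print(requests.delete(f"{BASE_URL}/cache/clear").json())
```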
app/__init__.py CHANGED
@@ -11,7 +11,9 @@ import os
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 try:
-
+    # Sửa lại cách import đúng - 'app.py' không phải là module hợp lệ
+    # 'app' là tên module, '.py' là phần mở rộng tệp
+    from app import app
 except ImportError:
     # Thử cách khác nếu import trực tiếp không hoạt động
     import importlib.util
app/api/mongodb_routes.py CHANGED
@@ -8,7 +8,7 @@ import asyncio
 
 from app.database.mongodb import (
     save_session,
-
+    get_chat_history,
     update_session_response,
     check_db_connection,
     session_collection
@@ -178,7 +178,7 @@ async def get_history(user_id: str, n: int = Query(3, ge=1, le=10)):
     )
 
     # Get user history from MongoDB
-    history_data =
+    history_data = get_chat_history(user_id=user_id, n=n)
 
     # Convert to response model
     return HistoryResponse(history=history_data)
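The /history endpoint now delegates to `get_chat_history` from `app/database/mongodb.py`, whose body is not part of this diff. Purely for orientation, a hypothetical sketch of such a helper might look like the following; the connection string, database and collection names, and field names are assumptions.

```python
# Hypothetical sketch of a get_chat_history(user_id, n) helper backed by MongoDB.
# The real implementation lives in app/database/mongodb.py.
from pymongo import MongoClient, DESCENDING

client = MongoClient("mongodb://localhost:27017")      # assumed connection string
session_collection = client["pixagent"]["sessions"]    # assumed db/collection names


def get_chat_history(user_id: str, n: int = 3):
    """Return the user's n most recent question/answer pairs, oldest first."""
    cursor = (
        session_collection.find({"user_id": user_id})
        .sort("created_at", DESCENDING)
        .limit(n)
    )
    return [
        {"message": doc.get("message"), "response": doc.get("response")}
        for doc in reversed(list(cursor))
    ]
```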
app/api/pdf_routes.py ADDED
@@ -0,0 +1,233 @@
+import os
+import shutil
+import uuid
+from fastapi import APIRouter, UploadFile, File, Form, HTTPException, BackgroundTasks
+from fastapi.responses import JSONResponse
+from typing import Optional, List, Dict, Any
+
+from app.utils.pdf_processor import PDFProcessor
+from app.models.pdf_models import PDFResponse, DeleteDocumentRequest, DocumentsListResponse
+from app.api.pdf_websocket import (
+    send_pdf_upload_started,
+    send_pdf_upload_progress,
+    send_pdf_upload_completed,
+    send_pdf_upload_failed,
+    send_pdf_delete_started,
+    send_pdf_delete_completed,
+    send_pdf_delete_failed
+)
+
+# Khởi tạo router
+router = APIRouter(
+    prefix="/pdf",
+    tags=["PDF Processing"],
+)
+
+# Thư mục lưu file tạm - sử dụng /tmp để tránh lỗi quyền truy cập
+TEMP_UPLOAD_DIR = "/tmp/uploads/temp"
+STORAGE_DIR = "/tmp/uploads/pdfs"
+
+# Đảm bảo thư mục upload tồn tại
+os.makedirs(TEMP_UPLOAD_DIR, exist_ok=True)
+os.makedirs(STORAGE_DIR, exist_ok=True)
+
+# Endpoint upload và xử lý PDF
+@router.post("/upload", response_model=PDFResponse)
+async def upload_pdf(
+    file: UploadFile = File(...),
+    namespace: str = Form("Default"),
+    index_name: str = Form("testbot768"),
+    title: Optional[str] = Form(None),
+    description: Optional[str] = Form(None),
+    user_id: Optional[str] = Form(None),
+    background_tasks: BackgroundTasks = None
+):
+    """
+    Upload và xử lý file PDF để tạo embeddings và lưu vào Pinecone
+
+    - **file**: File PDF cần xử lý
+    - **namespace**: Namespace trong Pinecone để lưu embeddings (mặc định: "Default")
+    - **index_name**: Tên index Pinecone (mặc định: "testbot768")
+    - **title**: Tiêu đề của tài liệu (tùy chọn)
+    - **description**: Mô tả về tài liệu (tùy chọn)
+    - **user_id**: ID của người dùng để cập nhật trạng thái qua WebSocket
+    """
+    try:
+        # Kiểm tra file có phải PDF không
+        if not file.filename.lower().endswith('.pdf'):
+            raise HTTPException(status_code=400, detail="Chỉ chấp nhận file PDF")
+
+        # Tạo file_id và lưu file tạm
+        file_id = str(uuid.uuid4())
+        temp_file_path = os.path.join(TEMP_UPLOAD_DIR, f"{file_id}.pdf")
+
+        # Gửi thông báo bắt đầu xử lý qua WebSocket nếu có user_id
+        if user_id:
+            await send_pdf_upload_started(user_id, file.filename, file_id)
+
+        # Lưu file
+        with open(temp_file_path, "wb") as buffer:
+            shutil.copyfileobj(file.file, buffer)
+
+        # Tạo metadata
+        metadata = {
+            "filename": file.filename,
+            "content_type": file.content_type
+        }
+
+        if title:
+            metadata["title"] = title
+        if description:
+            metadata["description"] = description
+
+        # Gửi thông báo tiến độ qua WebSocket
+        if user_id:
+            await send_pdf_upload_progress(
+                user_id,
+                file_id,
+                "file_preparation",
+                0.2,
+                "File saved, preparing for processing"
+            )
+
+        # Khởi tạo PDF processor
+        processor = PDFProcessor(index_name=index_name, namespace=namespace)
+
+        # Gửi thông báo bắt đầu embedding qua WebSocket
+        if user_id:
+            await send_pdf_upload_progress(
+                user_id,
+                file_id,
+                "embedding_start",
+                0.4,
+                "Starting to process PDF and create embeddings"
+            )
+
+        # Xử lý PDF và tạo embeddings
+        # Tạo callback function để xử lý cập nhật tiến độ
+        async def progress_callback_wrapper(step, progress, message):
+            if user_id:
+                await send_progress_update(user_id, file_id, step, progress, message)
+
+        # Xử lý PDF và tạo embeddings với callback đã được xử lý đúng cách
+        result = await processor.process_pdf(
+            file_path=temp_file_path,
+            document_id=file_id,
+            metadata=metadata,
+            progress_callback=progress_callback_wrapper
+        )
+
+        # Nếu thành công, chuyển file vào storage
+        if result.get('success'):
+            storage_path = os.path.join(STORAGE_DIR, f"{file_id}.pdf")
+            shutil.move(temp_file_path, storage_path)
+
+            # Gửi thông báo hoàn thành qua WebSocket
+            if user_id:
+                await send_pdf_upload_completed(
+                    user_id,
+                    file_id,
+                    file.filename,
+                    result.get('chunks_processed', 0)
+                )
+        else:
+            # Gửi thông báo lỗi qua WebSocket
+            if user_id:
+                await send_pdf_upload_failed(
+                    user_id,
+                    file_id,
+                    file.filename,
+                    result.get('error', 'Unknown error')
+                )
+
+        # Dọn dẹp: xóa file tạm nếu vẫn còn
+        if os.path.exists(temp_file_path):
+            os.remove(temp_file_path)
+
+        return result
+    except Exception as e:
+        # Dọn dẹp nếu có lỗi
+        if 'temp_file_path' in locals() and os.path.exists(temp_file_path):
+            os.remove(temp_file_path)
+
+        # Gửi thông báo lỗi qua WebSocket
+        if 'user_id' in locals() and user_id and 'file_id' in locals():
+            await send_pdf_upload_failed(
+                user_id,
+                file_id,
+                file.filename,
+                str(e)
+            )
+
+        return PDFResponse(
+            success=False,
+            error=str(e)
+        )
+
+# Function để gửi cập nhật tiến độ - được sử dụng trong callback
+async def send_progress_update(user_id, document_id, step, progress, message):
+    if user_id:
+        await send_pdf_upload_progress(user_id, document_id, step, progress, message)
+
+# Endpoint xóa tài liệu
+@router.delete("/namespace", response_model=PDFResponse)
+async def delete_namespace(
+    namespace: str = "Default",
+    index_name: str = "testbot768",
+    user_id: Optional[str] = None
+):
+    """
+    Xóa toàn bộ embeddings trong một namespace từ Pinecone (tương ứng xoá namespace)
+
+    - **namespace**: Namespace trong Pinecone (mặc định: "Default")
+    - **index_name**: Tên index Pinecone (mặc định: "testbot768")
+    - **user_id**: ID của người dùng để cập nhật trạng thái qua WebSocket
+    """
+    try:
+        # Gửi thông báo bắt đầu xóa qua WebSocket
+        if user_id:
+            await send_pdf_delete_started(user_id, namespace)
+
+        processor = PDFProcessor(index_name=index_name, namespace=namespace)
+        result = await processor.delete_namespace()
+
+        # Gửi thông báo kết quả qua WebSocket
+        if user_id:
+            if result.get('success'):
+                await send_pdf_delete_completed(user_id, namespace)
+            else:
+                await send_pdf_delete_failed(user_id, namespace, result.get('error', 'Unknown error'))
+
+        return result
+    except Exception as e:
+        # Gửi thông báo lỗi qua WebSocket
+        if user_id:
+            await send_pdf_delete_failed(user_id, namespace, str(e))
+
+        return PDFResponse(
+            success=False,
+            error=str(e)
+        )
+
+# Endpoint lấy danh sách tài liệu
+@router.get("/documents", response_model=DocumentsListResponse)
+async def get_documents(namespace: str = "Default", index_name: str = "testbot768"):
+    """
+    Lấy thông tin về tất cả tài liệu đã được embed
+
+    - **namespace**: Namespace trong Pinecone (mặc định: "Default")
+    - **index_name**: Tên index Pinecone (mặc định: "testbot768")
+    """
+    try:
+        # Khởi tạo PDF processor
+        processor = PDFProcessor(index_name=index_name, namespace=namespace)
+
+        # Lấy danh sách documents
+        result = await processor.list_documents()
+
+        return result
+    except Exception as e:
+        return DocumentsListResponse(
+            success=False,
+            error=str(e)
+        )
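To illustrate the new upload flow, a client might call POST /pdf/upload with a multipart form as below. The file name, metadata values, and base URL are placeholders, not values defined by the commit.

```python
# Hypothetical client call for the new POST /pdf/upload endpoint.
import requests

BASE_URL = "http://localhost:7860"

with open("sample.pdf", "rb") as f:              # placeholder file
    response = requests.post(
        f"{BASE_URL}/pdf/upload",
        files={"file": ("sample.pdf", f, "application/pdf")},
        data={
            "namespace": "Default",              # Pinecone namespace
            "index_name": "testbot768",          # Pinecone index
            "title": "Da Nang travel guide",     # optional metadata
            "user_id": "user123",                # enables WebSocket progress updates
        },
        timeout=300,
    )
print(response.json())
```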
app/api/pdf_websocket.py ADDED
@@ -0,0 +1,263 @@
+import logging
+from typing import Dict, List, Optional, Any
+from fastapi import WebSocket, WebSocketDisconnect, APIRouter
+from pydantic import BaseModel
+import json
+import time
+
+# Cấu hình logging
+logger = logging.getLogger(__name__)
+
+# Models cho Swagger documentation
+class ConnectionStatus(BaseModel):
+    user_id: str
+    active: bool
+    connection_count: int
+    last_activity: Optional[float] = None
+
+class UserConnection(BaseModel):
+    user_id: str
+    connection_count: int
+
+class AllConnectionsStatus(BaseModel):
+    total_users: int
+    total_connections: int
+    users: List[UserConnection]
+
+# Khởi tạo router
+router = APIRouter(
+    prefix="/ws",
+    tags=["WebSockets"],
+)
+
+class ConnectionManager:
+    """Quản lý các kết nối WebSocket"""
+
+    def __init__(self):
+        # Lưu trữ các kết nối theo user_id
+        self.active_connections: Dict[str, List[WebSocket]] = {}
+
+    async def connect(self, websocket: WebSocket, user_id: str):
+        """Kết nối một WebSocket mới"""
+        await websocket.accept()
+        if user_id not in self.active_connections:
+            self.active_connections[user_id] = []
+        self.active_connections[user_id].append(websocket)
+        logger.info(f"New WebSocket connection for user {user_id}. Total connections: {len(self.active_connections[user_id])}")
+
+    def disconnect(self, websocket: WebSocket, user_id: str):
+        """Ngắt kết nối WebSocket"""
+        if user_id in self.active_connections:
+            if websocket in self.active_connections[user_id]:
+                self.active_connections[user_id].remove(websocket)
+            # Xóa user_id khỏi dict nếu không còn kết nối nào
+            if not self.active_connections[user_id]:
+                del self.active_connections[user_id]
+        logger.info(f"WebSocket disconnected for user {user_id}")
+
+    async def send_message(self, message: Dict[str, Any], user_id: str):
+        """Gửi tin nhắn tới tất cả kết nối của một user"""
+        if user_id in self.active_connections:
+            disconnected_websockets = []
+            for websocket in self.active_connections[user_id]:
+                try:
+                    await websocket.send_text(json.dumps(message))
+                except Exception as e:
+                    logger.error(f"Error sending message to WebSocket: {str(e)}")
+                    disconnected_websockets.append(websocket)
+
+            # Xóa các kết nối bị ngắt
+            for websocket in disconnected_websockets:
+                self.disconnect(websocket, user_id)
+
+    def get_connection_status(self, user_id: str = None) -> Dict[str, Any]:
+        """Lấy thông tin về trạng thái kết nối WebSocket"""
+        if user_id:
+            # Trả về thông tin kết nối cho user cụ thể
+            if user_id in self.active_connections:
+                return {
+                    "user_id": user_id,
+                    "active": True,
+                    "connection_count": len(self.active_connections[user_id]),
+                    "last_activity": time.time()
+                }
+            else:
+                return {
+                    "user_id": user_id,
+                    "active": False,
+                    "connection_count": 0,
+                    "last_activity": None
+                }
+        else:
+            # Trả về thông tin tất cả kết nối
+            result = {
+                "total_users": len(self.active_connections),
+                "total_connections": sum(len(connections) for connections in self.active_connections.values()),
+                "users": []
+            }
+
+            for uid, connections in self.active_connections.items():
+                result["users"].append({
+                    "user_id": uid,
+                    "connection_count": len(connections)
+                })
+
+            return result
+
+
+# Tạo instance của ConnectionManager
+manager = ConnectionManager()
+
+@router.websocket("/pdf/{user_id}")
+async def websocket_endpoint(websocket: WebSocket, user_id: str):
+    """Endpoint WebSocket để cập nhật tiến trình xử lý PDF"""
+    await manager.connect(websocket, user_id)
+    try:
+        while True:
+            # Đợi tin nhắn từ client (chỉ để giữ kết nối)
+            await websocket.receive_text()
+    except WebSocketDisconnect:
+        manager.disconnect(websocket, user_id)
+    except Exception as e:
+        logger.error(f"WebSocket error: {str(e)}")
+        manager.disconnect(websocket, user_id)
+
+# API endpoints để kiểm tra trạng thái WebSocket
+@router.get("/status", response_model=AllConnectionsStatus, responses={
+    200: {
+        "description": "Successful response",
+        "content": {
+            "application/json": {
+                "example": {
+                    "total_users": 2,
+                    "total_connections": 3,
+                    "users": [
+                        {"user_id": "user1", "connection_count": 2},
+                        {"user_id": "user2", "connection_count": 1}
+                    ]
+                }
+            }
+        }
+    }
+})
+async def get_all_websocket_connections():
+    """
+    Lấy thông tin về tất cả kết nối WebSocket hiện tại.
+
+    Endpoint này trả về:
+    - Tổng số người dùng đang kết nối
+    - Tổng số kết nối WebSocket
+    - Danh sách người dùng kèm theo số lượng kết nối của mỗi người
+    """
+    return manager.get_connection_status()
+
+@router.get("/status/{user_id}", response_model=ConnectionStatus, responses={
+    200: {
+        "description": "Successful response for active connection",
+        "content": {
+            "application/json": {
+                "examples": {
+                    "active_connection": {
+                        "summary": "Active connection",
+                        "value": {
+                            "user_id": "user123",
+                            "active": True,
+                            "connection_count": 2,
+                            "last_activity": 1634567890.123
+                        }
+                    },
+                    "no_connection": {
+                        "summary": "No active connection",
+                        "value": {
+                            "user_id": "user456",
+                            "active": False,
+                            "connection_count": 0,
+                            "last_activity": None
+                        }
+                    }
+                }
+            }
+        }
+    }
+})
+async def get_user_websocket_status(user_id: str):
+    """
+    Lấy thông tin về kết nối WebSocket của một người dùng cụ thể.
+
+    Parameters:
+    - **user_id**: ID của người dùng cần kiểm tra
+
+    Returns:
+    - Thông tin về trạng thái kết nối, bao gồm:
+      - active: Có đang kết nối hay không
+      - connection_count: Số lượng kết nối hiện tại
+      - last_activity: Thời gian hoạt động gần nhất
+    """
+    return manager.get_connection_status(user_id)
+
+# Các hàm gửi thông báo cập nhật trạng thái
+
+async def send_pdf_upload_started(user_id: str, filename: str, document_id: str):
+    """Gửi thông báo bắt đầu upload PDF"""
+    await manager.send_message({
+        "type": "pdf_upload_started",
+        "document_id": document_id,
+        "filename": filename,
+        "timestamp": int(time.time())
+    }, user_id)
+
+async def send_pdf_upload_progress(user_id: str, document_id: str, step: str, progress: float, message: str):
+    """Gửi thông báo tiến độ upload PDF"""
+    await manager.send_message({
+        "type": "pdf_upload_progress",
+        "document_id": document_id,
+        "step": step,
+        "progress": progress,
+        "message": message,
+        "timestamp": int(time.time())
+    }, user_id)
+
+async def send_pdf_upload_completed(user_id: str, document_id: str, filename: str, chunks: int):
+    """Gửi thông báo hoàn thành upload PDF"""
+    await manager.send_message({
+        "type": "pdf_upload_completed",
+        "document_id": document_id,
+        "filename": filename,
+        "chunks": chunks,
+        "timestamp": int(time.time())
+    }, user_id)
+
+async def send_pdf_upload_failed(user_id: str, document_id: str, filename: str, error: str):
+    """Gửi thông báo lỗi upload PDF"""
+    await manager.send_message({
+        "type": "pdf_upload_failed",
+        "document_id": document_id,
+        "filename": filename,
+        "error": error,
+        "timestamp": int(time.time())
+    }, user_id)
+
+async def send_pdf_delete_started(user_id: str, namespace: str):
+    """Gửi thông báo bắt đầu xóa PDF"""
+    await manager.send_message({
+        "type": "pdf_delete_started",
+        "namespace": namespace,
+        "timestamp": int(time.time())
+    }, user_id)
+
+async def send_pdf_delete_completed(user_id: str, namespace: str):
+    """Gửi thông báo hoàn thành xóa PDF"""
+    await manager.send_message({
+        "type": "pdf_delete_completed",
+        "namespace": namespace,
+        "timestamp": int(time.time())
+    }, user_id)
+
+async def send_pdf_delete_failed(user_id: str, namespace: str, error: str):
+    """Gửi thông báo lỗi xóa PDF"""
+    await manager.send_message({
+        "type": "pdf_delete_failed",
+        "namespace": namespace,
+        "error": error,
+        "timestamp": int(time.time())
+    }, user_id)
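A client that wants the progress events emitted by the functions above would connect to /ws/pdf/{user_id} and read JSON messages. The sketch below uses the third-party websockets package; the local URL, port, and user id are assumptions.

```python
# Hypothetical listener for the progress messages sent by pdf_websocket.py.
import asyncio
import json

import websockets


async def listen(user_id: str):
    uri = f"ws://localhost:7860/ws/pdf/{user_id}"
    async with websockets.connect(uri) as ws:
        while True:
            event = json.loads(await ws.recv())
            # Event types: pdf_upload_started, pdf_upload_progress,
            # pdf_upload_completed, pdf_upload_failed, pdf_delete_*.
            print(event["type"], event.get("progress"), event.get("message"))


asyncio.run(listen("user123"))
```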
app/api/postgresql_routes.py CHANGED
The diff for this file is too large to render. See raw diff.
app/api/rag_routes.py
CHANGED
|
@@ -11,9 +11,9 @@ import google.generativeai as genai
|
|
| 11 |
from datetime import datetime
|
| 12 |
from langchain.prompts import PromptTemplate
|
| 13 |
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
| 14 |
-
from app.utils.utils import
|
| 15 |
|
| 16 |
-
from app.database.mongodb import
|
| 17 |
from app.database.pinecone import (
|
| 18 |
search_vectors,
|
| 19 |
get_chain,
|
|
@@ -33,32 +33,6 @@ from app.models.rag_models import (
|
|
| 33 |
UserMessageModel
|
| 34 |
)
|
| 35 |
|
| 36 |
-
# Sử dụng bộ nhớ đệm thay vì Redis
|
| 37 |
-
class SimpleCache:
|
| 38 |
-
def __init__(self):
|
| 39 |
-
self.cache = {}
|
| 40 |
-
self.expiration = {}
|
| 41 |
-
|
| 42 |
-
async def get(self, key):
|
| 43 |
-
if key in self.cache:
|
| 44 |
-
# Kiểm tra xem cache đã hết hạn chưa
|
| 45 |
-
if key in self.expiration and self.expiration[key] > time.time():
|
| 46 |
-
return self.cache[key]
|
| 47 |
-
else:
|
| 48 |
-
# Xóa cache đã hết hạn
|
| 49 |
-
if key in self.cache:
|
| 50 |
-
del self.cache[key]
|
| 51 |
-
if key in self.expiration:
|
| 52 |
-
del self.expiration[key]
|
| 53 |
-
return None
|
| 54 |
-
|
| 55 |
-
async def set(self, key, value, ex=300): # Mặc định 5 phút
|
| 56 |
-
self.cache[key] = value
|
| 57 |
-
self.expiration[key] = time.time() + ex
|
| 58 |
-
|
| 59 |
-
# Khởi tạo SimpleCache
|
| 60 |
-
redis_client = SimpleCache()
|
| 61 |
-
|
| 62 |
# Configure logging
|
| 63 |
logger = logging.getLogger(__name__)
|
| 64 |
|
|
@@ -72,6 +46,29 @@ router = APIRouter(
|
|
| 72 |
tags=["RAG"],
|
| 73 |
)
|
| 74 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 75 |
# Create a prompt template with conversation history
|
| 76 |
prompt = PromptTemplate(
|
| 77 |
template = """Goal:
|
|
@@ -87,7 +84,7 @@ Warning:
|
|
| 87 |
Let's support users like a real tour guide, not a bot. The information in core knowledge is your own knowledge.
|
| 88 |
Your knowledge is provided in the Core Knowledge. All of information in Core Knowledge is about Da Nang, Vietnam.
|
| 89 |
You just care about current time that user mention when user ask about Solana event.
|
| 90 |
-
If you do not have enough information to answer user's question, please reply with "I
|
| 91 |
|
| 92 |
Core knowledge:
|
| 93 |
{context}
|
|
@@ -162,102 +159,18 @@ async def chat(request: ChatRequest, background_tasks: BackgroundTasks):
|
|
| 162 |
"""
|
| 163 |
start_time = time.time()
|
| 164 |
try:
|
| 165 |
-
# Create cache key for request
|
| 166 |
-
cache_key = f"rag_chat:{request.user_id}:{request.question}:{request.include_history}:{request.use_rag}:{request.similarity_top_k}:{request.limit_k}:{request.similarity_metric}:{request.similarity_threshold}"
|
| 167 |
-
|
| 168 |
-
# Check cache using redis_client instead of cache
|
| 169 |
-
cached_response = await redis_client.get(cache_key)
|
| 170 |
-
if cached_response is not None:
|
| 171 |
-
logger.info(f"Cache hit for RAG chat request from user {request.user_id}")
|
| 172 |
-
try:
|
| 173 |
-
# If cached_response is string (JSON), parse it
|
| 174 |
-
if isinstance(cached_response, str):
|
| 175 |
-
cached_data = json.loads(cached_response)
|
| 176 |
-
return ChatResponse(
|
| 177 |
-
answer=cached_data.get("answer", ""),
|
| 178 |
-
processing_time=cached_data.get("processing_time", 0.0)
|
| 179 |
-
)
|
| 180 |
-
# If cached_response is object with sources, extract answer and processing_time
|
| 181 |
-
elif hasattr(cached_response, 'sources'):
|
| 182 |
-
return ChatResponse(
|
| 183 |
-
answer=cached_response.answer,
|
| 184 |
-
processing_time=cached_response.processing_time
|
| 185 |
-
)
|
| 186 |
-
# Otherwise, return cached response as is
|
| 187 |
-
return cached_response
|
| 188 |
-
except Exception as e:
|
| 189 |
-
logger.error(f"Error parsing cached response: {e}")
|
| 190 |
-
# Continue processing if cache parsing fails
|
| 191 |
-
|
| 192 |
# Save user message first (so it's available for user history)
|
| 193 |
session_id = request.session_id or f"{request.user_id}_{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}"
|
| 194 |
-
logger.info(f"Processing chat request for user {request.user_id}, session {session_id}")
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
|
| 200 |
-
|
| 201 |
-
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
last_name=getattr(request, 'last_name', ""),
|
| 205 |
-
message=request.question,
|
| 206 |
-
user_id=request.user_id,
|
| 207 |
-
username=getattr(request, 'username', ""),
|
| 208 |
-
response=None # No response yet
|
| 209 |
-
)
|
| 210 |
-
logger.info(f"User message saved for session {session_id}")
|
| 211 |
-
except Exception as e:
|
| 212 |
-
logger.error(f"Error saving user message to session: {e}")
|
| 213 |
-
# Continue processing even if saving fails
|
| 214 |
-
|
| 215 |
-
# Use the RAG pipeline
|
| 216 |
-
if request.use_rag:
|
| 217 |
-
# Get the retriever with custom parameters
|
| 218 |
-
retriever = get_chain(
|
| 219 |
-
top_k=request.similarity_top_k,
|
| 220 |
-
limit_k=request.limit_k,
|
| 221 |
-
similarity_metric=request.similarity_metric,
|
| 222 |
-
similarity_threshold=request.similarity_threshold
|
| 223 |
-
)
|
| 224 |
-
if not retriever:
|
| 225 |
-
raise HTTPException(status_code=500, detail="Failed to initialize retriever")
|
| 226 |
-
|
| 227 |
-
# Get request history for context
|
| 228 |
-
context_query = get_request_history(request.user_id) if request.include_history else request.question
|
| 229 |
-
logger.info(f"Using context query for retrieval: {context_query[:100]}...")
|
| 230 |
-
|
| 231 |
-
# Retrieve relevant documents
|
| 232 |
-
retrieved_docs = retriever.invoke(context_query)
|
| 233 |
-
context = "\n".join([doc.page_content for doc in retrieved_docs])
|
| 234 |
-
|
| 235 |
-
# Prepare sources
|
| 236 |
-
sources = []
|
| 237 |
-
for doc in retrieved_docs:
|
| 238 |
-
source = None
|
| 239 |
-
metadata = {}
|
| 240 |
-
|
| 241 |
-
if hasattr(doc, 'metadata'):
|
| 242 |
-
source = doc.metadata.get('source', None)
|
| 243 |
-
# Extract score information
|
| 244 |
-
score = doc.metadata.get('score', None)
|
| 245 |
-
normalized_score = doc.metadata.get('normalized_score', None)
|
| 246 |
-
# Remove score info from metadata to avoid duplication
|
| 247 |
-
metadata = {k: v for k, v in doc.metadata.items()
|
| 248 |
-
if k not in ['text', 'source', 'score', 'normalized_score']}
|
| 249 |
-
|
| 250 |
-
sources.append(SourceDocument(
|
| 251 |
-
text=doc.page_content,
|
| 252 |
-
source=source,
|
| 253 |
-
score=score,
|
| 254 |
-
normalized_score=normalized_score,
|
| 255 |
-
metadata=metadata
|
| 256 |
-
))
|
| 257 |
-
else:
|
| 258 |
-
# No RAG
|
| 259 |
-
context = ""
|
| 260 |
-
sources = None
|
| 261 |
|
| 262 |
# Get chat history
|
| 263 |
chat_history = get_chat_history(request.user_id) if request.include_history else ""
|
|
@@ -295,11 +208,50 @@ async def chat(request: ChatRequest, background_tasks: BackgroundTasks):
|
|
| 295 |
generation_config=generation_config,
|
| 296 |
safety_settings=safety_settings
|
| 297 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 298 |
|
| 299 |
# Generate the prompt using template
|
| 300 |
prompt_text = prompt.format(
|
| 301 |
context=context,
|
| 302 |
-
question=request.question,
|
| 303 |
chat_history=chat_history
|
| 304 |
)
|
| 305 |
logger.info(f"Full prompt with history and context: {prompt_text}")
|
|
@@ -308,59 +260,11 @@ async def chat(request: ChatRequest, background_tasks: BackgroundTasks):
|
|
| 308 |
response = model.generate_content(prompt_text)
|
| 309 |
answer = response.text
|
| 310 |
|
| 311 |
-
# Save the RAG response
|
| 312 |
-
try:
|
| 313 |
-
# Now save the RAG response with the same session_id
|
| 314 |
-
save_session(
|
| 315 |
-
session_id=session_id,
|
| 316 |
-
factor="rag",
|
| 317 |
-
action="RAG_response",
|
| 318 |
-
first_name=getattr(request, 'first_name', "User"),
|
| 319 |
-
last_name=getattr(request, 'last_name', ""),
|
| 320 |
-
message=request.question,
|
| 321 |
-
user_id=request.user_id,
|
| 322 |
-
username=getattr(request, 'username', ""),
|
| 323 |
-
response=answer
|
| 324 |
-
)
|
| 325 |
-
logger.info(f"RAG response saved for session {session_id}")
|
| 326 |
-
|
| 327 |
-
# Check if the response starts with "I don't know" and trigger notification
|
| 328 |
-
if answer.strip().lower().startswith("i don't know"):
|
| 329 |
-
from app.api.websocket_routes import send_notification
|
| 330 |
-
notification_data = {
|
| 331 |
-
"session_id": session_id,
|
| 332 |
-
"factor": "rag",
|
| 333 |
-
"action": "RAG_response",
|
| 334 |
-
"message": request.question,
|
| 335 |
-
"user_id": request.user_id,
|
| 336 |
-
"username": getattr(request, 'username', ""),
|
| 337 |
-
"first_name": getattr(request, 'first_name', "User"),
|
| 338 |
-
"last_name": getattr(request, 'last_name', ""),
|
| 339 |
-
"response": answer,
|
| 340 |
-
"created_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
| 341 |
-
}
|
| 342 |
-
background_tasks.add_task(send_notification, notification_data)
|
| 343 |
-
logger.info(f"Notification queued for session {session_id} - response starts with 'I don't know'")
|
| 344 |
-
except Exception as e:
|
| 345 |
-
logger.error(f"Error saving RAG response to session: {e}")
|
| 346 |
-
# Continue processing even if saving fails
|
| 347 |
-
|
| 348 |
# Calculate processing time
|
| 349 |
processing_time = time.time() - start_time
|
| 350 |
|
| 351 |
-
# Create internal response object with sources for logging
|
| 352 |
-
internal_response = ChatResponseInternal(
|
| 353 |
-
answer=answer,
|
| 354 |
-
sources=sources,
|
| 355 |
-
processing_time=processing_time
|
| 356 |
-
)
|
| 357 |
-
|
| 358 |
# Log full response with sources
|
| 359 |
-
logger.info(f"Generated response for user {request.user_id}: {answer}")
|
| 360 |
-
if sources:
|
| 361 |
-
logger.info(f"Sources used: {len(sources)} documents")
|
| 362 |
-
for i, source in enumerate(sources):
|
| 363 |
-
logger.info(f"Source {i+1}: {source.source or 'Unknown'} (score: {source.score})")
|
| 364 |
|
| 365 |
# Create response object for API (without sources)
|
| 366 |
chat_response = ChatResponse(
|
|
@@ -368,18 +272,6 @@ async def chat(request: ChatRequest, background_tasks: BackgroundTasks):
|
|
| 368 |
processing_time=processing_time
|
| 369 |
)
|
| 370 |
|
| 371 |
-
# Cache result using redis_client instead of cache
|
| 372 |
-
try:
|
| 373 |
-
# Convert to JSON to ensure it can be cached
|
| 374 |
-
cache_data = {
|
| 375 |
-
"answer": answer,
|
| 376 |
-
"processing_time": processing_time
|
| 377 |
-
}
|
| 378 |
-
await redis_client.set(cache_key, json.dumps(cache_data), ex=300)
|
| 379 |
-
except Exception as e:
|
| 380 |
-
logger.error(f"Error caching response: {e}")
|
| 381 |
-
# Continue even if caching fails
|
| 382 |
-
|
| 383 |
# Return response
|
| 384 |
return chat_response
|
| 385 |
except Exception as e:
|
|
@@ -443,96 +335,4 @@ async def health_check():
|
|
| 443 |
"services": services,
|
| 444 |
"retrieval_config": retrieval_config,
|
| 445 |
"timestamp": datetime.now().isoformat()
|
| 446 |
-
}
|
| 447 |
-
|
| 448 |
-
@router.post("/rag")
|
| 449 |
-
async def process_rag(request: Request, user_data: UserMessageModel, background_tasks: BackgroundTasks):
|
| 450 |
-
"""
|
| 451 |
-
Process a user message through the RAG pipeline and return a response.
|
| 452 |
-
|
| 453 |
-
Parameters:
|
| 454 |
-
- **user_id**: User ID from the client application
|
| 455 |
-
- **session_id**: Session ID for tracking the conversation
|
| 456 |
-
- **message**: User's message/question
|
| 457 |
-
- **similarity_top_k**: (Optional) Number of top similar documents to return after filtering
|
| 458 |
-
- **limit_k**: (Optional) Maximum number of documents to retrieve from vector store
|
| 459 |
-
- **similarity_metric**: (Optional) Similarity metric to use (cosine, dotproduct, euclidean)
|
| 460 |
-
- **similarity_threshold**: (Optional) Threshold for vector similarity (0-1)
|
| 461 |
-
"""
|
| 462 |
-
try:
|
| 463 |
-
# Extract request data
|
| 464 |
-
user_id = user_data.user_id
|
| 465 |
-
session_id = user_data.session_id
|
| 466 |
-
message = user_data.message
|
| 467 |
-
|
| 468 |
-
# Extract retrieval parameters (use defaults if not provided)
|
| 469 |
-
top_k = user_data.similarity_top_k or DEFAULT_TOP_K
|
| 470 |
-
limit_k = user_data.limit_k or DEFAULT_LIMIT_K
|
| 471 |
-
similarity_metric = user_data.similarity_metric or DEFAULT_SIMILARITY_METRIC
|
| 472 |
-
similarity_threshold = user_data.similarity_threshold or DEFAULT_SIMILARITY_THRESHOLD
|
| 473 |
-
|
| 474 |
-
logger.info(f"RAG request received for user_id={user_id}, session_id={session_id}")
|
| 475 |
-
logger.info(f"Message: {message[:100]}..." if len(message) > 100 else f"Message: {message}")
|
| 476 |
-
logger.info(f"Retrieval parameters: top_k={top_k}, limit_k={limit_k}, metric={similarity_metric}, threshold={similarity_threshold}")
|
| 477 |
-
|
| 478 |
-
# Create a cache key for this request to avoid reprocessing identical questions
|
| 479 |
-
cache_key = f"rag_{user_id}_{session_id}_{hashlib.md5(message.encode()).hexdigest()}_{top_k}_{limit_k}_{similarity_metric}_{similarity_threshold}"
|
| 480 |
-
|
| 481 |
-
# Check if we have this response cached
|
| 482 |
-
cached_result = await redis_client.get(cache_key)
|
| 483 |
-
if cached_result:
|
| 484 |
-
logger.info(f"Cache hit for key: {cache_key}")
|
| 485 |
-
if isinstance(cached_result, str): # If stored as JSON string
|
| 486 |
-
return json.loads(cached_result)
|
| 487 |
-
return cached_result
|
| 488 |
-
|
| 489 |
-
# Save user message to MongoDB
|
| 490 |
-
try:
|
| 491 |
-
# Save user's question
|
| 492 |
-
save_session(
|
| 493 |
-
session_id=session_id,
|
| 494 |
-
factor="user",
|
| 495 |
-
action="asking_freely",
|
| 496 |
-
first_name="User", # You can update this with actual data if available
|
| 497 |
-
last_name="",
|
| 498 |
-
message=message,
|
| 499 |
-
user_id=user_id,
|
| 500 |
-
username="",
|
| 501 |
-
response=None # No response yet
|
| 502 |
-
)
|
| 503 |
-
logger.info(f"User message saved to MongoDB with session_id: {session_id}")
|
| 504 |
-
except Exception as e:
|
| 505 |
-
logger.error(f"Error saving user message: {e}")
|
| 506 |
-
# Continue anyway to try to get a response
|
| 507 |
-
|
| 508 |
-
# Create a ChatRequest object to reuse the existing chat endpoint
|
| 509 |
-
chat_request = ChatRequest(
|
| 510 |
-
user_id=user_id,
|
| 511 |
-
question=message,
|
| 512 |
-
include_history=True,
|
| 513 |
-
use_rag=True,
|
| 514 |
-
similarity_top_k=top_k,
|
| 515 |
-
limit_k=limit_k,
|
| 516 |
-
similarity_metric=similarity_metric,
|
| 517 |
-
similarity_threshold=similarity_threshold,
|
| 518 |
-
session_id=session_id
|
| 519 |
-
)
|
| 520 |
-
|
| 521 |
-
# Process through the chat endpoint
|
| 522 |
-
response = await chat(chat_request, background_tasks)
|
| 523 |
-
|
| 524 |
-
# Cache the response
|
| 525 |
-
try:
|
| 526 |
-
await redis_client.set(cache_key, json.dumps({
|
| 527 |
-
"answer": response.answer,
|
| 528 |
-
"processing_time": response.processing_time
|
| 529 |
-
}))
|
| 530 |
-
logger.info(f"Cached response for key: {cache_key}")
|
| 531 |
-
except Exception as e:
|
| 532 |
-
logger.error(f"Failed to cache response: {e}")
|
| 533 |
-
|
| 534 |
-
return response
|
| 535 |
-
except Exception as e:
|
| 536 |
-
logger.error(f"Error processing RAG request: {e}")
|
| 537 |
-
logger.error(traceback.format_exc())
|
| 538 |
-
raise HTTPException(status_code=500, detail=f"Error processing request: {str(e)}")
|
|
|
|
| 11 |
from datetime import datetime
|
| 12 |
from langchain.prompts import PromptTemplate
|
| 13 |
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
| 14 |
+
from app.utils.utils import timer_decorator
|
| 15 |
|
| 16 |
+
from app.database.mongodb import get_chat_history, get_request_history, session_collection
|
| 17 |
from app.database.pinecone import (
|
| 18 |
search_vectors,
|
| 19 |
get_chain,
|
|
|
|
| 33 |
UserMessageModel
|
| 34 |
)
|
| 35 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
# Configure logging
|
| 37 |
logger = logging.getLogger(__name__)
|
| 38 |
|
|
|
|
| 46 |
tags=["RAG"],
|
| 47 |
)
|
| 48 |
|
| 49 |
+
fix_request = PromptTemplate(
|
| 50 |
+
template = """Goal:
|
| 51 |
+
Your task is to rewrite the user's request so that it captures all of the relevant information from the chat history.
|
| 52 |
+
You will receive a conversation history and the user's current request.
|
| 53 |
+
Generate a new request that makes sense if the current request is related to the conversation history.
|
| 54 |
+
|
| 55 |
+
Return Format:
|
| 56 |
+
Only return the full user request with all of the important keywords.
|
| 57 |
+
If the current message is NOT related to the conversation history or there is no chat history: Return user's current request.
|
| 58 |
+
If the current message IS related to the conversation history: Return new request based on information from the conversation history and the current request.
|
| 59 |
+
|
| 60 |
+
Warning:
|
| 61 |
+
Only use the chat history if the current request is truly relevant to the previous conversation.
|
| 62 |
+
|
| 63 |
+
Conversation History:
|
| 64 |
+
{chat_history}
|
| 65 |
+
|
| 66 |
+
User current message:
|
| 67 |
+
{question}
|
| 68 |
+
""",
|
| 69 |
+
input_variables = ["chat_history", "question"],
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
# Create a prompt template with conversation history
|
| 73 |
prompt = PromptTemplate(
|
| 74 |
template = """Goal:
|
|
|
|
| 84 |
Support users like a real tour guide, not a bot. Treat the information in the Core Knowledge as your own knowledge.
|
| 85 |
Your knowledge is provided in the Core Knowledge. All of the information in the Core Knowledge is about Da Nang, Vietnam.
|
| 86 |
Only consider the current time the user mentions when they ask about Solana events.
|
| 87 |
+
Only use the Core Knowledge to answer. If you do not have enough information to answer the user's question, reply with "I'm sorry. I don't have information about that" and give the user some more topics they can ask about.
|
| 88 |
|
| 89 |
Core knowledge:
|
| 90 |
{context}
|
|
|
|
| 159 |
"""
|
| 160 |
start_time = time.time()
|
| 161 |
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 162 |
# Save user message first (so it's available for user history)
|
| 163 |
session_id = request.session_id or f"{request.user_id}_{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}"
|
| 164 |
+
# logger.info(f"Processing chat request for user {request.user_id}, session {session_id}")
|
| 165 |
+
|
| 166 |
+
retriever = get_chain(
|
| 167 |
+
top_k=request.similarity_top_k,
|
| 168 |
+
limit_k=request.limit_k,
|
| 169 |
+
similarity_metric=request.similarity_metric,
|
| 170 |
+
similarity_threshold=request.similarity_threshold
|
| 171 |
+
)
|
| 172 |
+
if not retriever:
|
| 173 |
+
raise HTTPException(status_code=500, detail="Failed to initialize retriever")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 174 |
|
| 175 |
# Get chat history
|
| 176 |
chat_history = get_chat_history(request.user_id) if request.include_history else ""
|
|
|
|
| 208 |
generation_config=generation_config,
|
| 209 |
safety_settings=safety_settings
|
| 210 |
)
|
| 211 |
+
|
| 212 |
+
prompt_request = fix_request.format(
|
| 213 |
+
question=request.question,
|
| 214 |
+
chat_history=chat_history
|
| 215 |
+
)
|
| 216 |
+
|
| 217 |
+
# Log the start time of the request-rewriting step
|
| 218 |
+
final_request_start_time = time.time()
|
| 219 |
+
final_request = model.generate_content(prompt_request)
|
| 220 |
+
# Log the completion time of the request-rewriting step
|
| 221 |
+
logger.info(f"Fixed Request: {final_request.text}")
|
| 222 |
+
logger.info(f"Final request generation time: {time.time() - final_request_start_time:.2f} seconds")
|
| 223 |
+
# print(final_request.text)
|
| 224 |
+
|
| 225 |
+
retrieved_docs = retriever.invoke(final_request.text)
|
| 226 |
+
logger.info(f"Retrieve: {retrieved_docs}")
|
| 227 |
+
context = "\n".join([doc.page_content for doc in retrieved_docs])
|
| 228 |
+
|
| 229 |
+
sources = []
|
| 230 |
+
for doc in retrieved_docs:
|
| 231 |
+
source = None
|
| 232 |
+
metadata = {}
|
| 233 |
+
|
| 234 |
+
if hasattr(doc, 'metadata'):
|
| 235 |
+
source = doc.metadata.get('source', None)
|
| 236 |
+
# Extract score information
|
| 237 |
+
score = doc.metadata.get('score', None)
|
| 238 |
+
normalized_score = doc.metadata.get('normalized_score', None)
|
| 239 |
+
# Remove score info from metadata to avoid duplication
|
| 240 |
+
metadata = {k: v for k, v in doc.metadata.items()
|
| 241 |
+
if k not in ['text', 'source', 'score', 'normalized_score']}
|
| 242 |
+
|
| 243 |
+
sources.append(SourceDocument(
|
| 244 |
+
text=doc.page_content,
|
| 245 |
+
source=source,
|
| 246 |
+
score=score,
|
| 247 |
+
normalized_score=normalized_score,
|
| 248 |
+
metadata=metadata
|
| 249 |
+
))
|
| 250 |
|
| 251 |
# Generate the prompt using template
|
| 252 |
prompt_text = prompt.format(
|
| 253 |
context=context,
|
| 254 |
+
question=final_request.text,
|
| 255 |
chat_history=chat_history
|
| 256 |
)
|
| 257 |
logger.info(f"Full prompt with history and context: {prompt_text}")
|
|
|
|
| 260 |
response = model.generate_content(prompt_text)
|
| 261 |
answer = response.text
|
| 262 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 263 |
# Calculate processing time
|
| 264 |
processing_time = time.time() - start_time
|
| 265 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 266 |
# Log full response with sources
|
| 267 |
+
# logger.info(f"Generated response for user {request.user_id}: {answer}")
|
|
|
|
|
|
|
|
|
|
|
|
|
| 268 |
|
| 269 |
# Create response object for API (without sources)
|
| 270 |
chat_response = ChatResponse(
|
|
|
|
| 272 |
processing_time=processing_time
|
| 273 |
)
|
| 274 |
|
|
|
|
|
|
|
|
|
|
|
|
| 275 |
# Return response
|
| 276 |
return chat_response
|
| 277 |
except Exception as e:
|
|
|
|
| 335 |
"services": services,
|
| 336 |
"retrieval_config": retrieval_config,
|
| 337 |
"timestamp": datetime.now().isoformat()
|
| 338 |
+
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
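Taken together, the rag_routes.py changes above replace the old context_query retrieval path with a two-step flow: the new fix_request template first rewrites the user's question against the chat history, and the rewritten text is what gets sent to the retriever and to the answering prompt. A minimal sketch of that flow, with shortened templates and stub classes standing in for the Gemini model and the Pinecone-backed retriever (the stubs and sample strings are illustrative assumptions, not the app's actual objects):

```python
from langchain.prompts import PromptTemplate

class StubResponse:
    def __init__(self, text: str):
        self.text = text

class StubModel:
    """Stands in for the Gemini model used in rag_routes.py."""
    def generate_content(self, prompt: str) -> StubResponse:
        return StubResponse(f"[answer generated from a prompt of {len(prompt)} characters]")

class StubDoc:
    def __init__(self, page_content: str):
        self.page_content = page_content

class StubRetriever:
    """Stands in for the ThresholdRetriever returned by get_chain()."""
    def invoke(self, query: str):
        return [StubDoc(f"core-knowledge chunk relevant to: {query}")]

# Shortened versions of the two templates added in this commit.
fix_request = PromptTemplate(
    template="Conversation History:\n{chat_history}\n\nUser current message:\n{question}",
    input_variables=["chat_history", "question"],
)
answer_prompt = PromptTemplate(
    template="Core knowledge:\n{context}\n\nHistory:\n{chat_history}\n\nQuestion:\n{question}",
    input_variables=["context", "chat_history", "question"],
)

model, retriever = StubModel(), StubRetriever()
chat_history = "User: What events are on this week?\nBot: The Solana Summit is coming up."
question = "Where is it held?"

# Step 1: rewrite the question so it is self-contained.
rewritten = model.generate_content(
    fix_request.format(chat_history=chat_history, question=question)
).text
# Step 2: retrieve with the rewritten request and build the context string.
docs = retriever.invoke(rewritten)
context = "\n".join(doc.page_content for doc in docs)
# Step 3: answer with the main prompt, passing the rewritten question.
answer = model.generate_content(
    answer_prompt.format(context=context, chat_history=chat_history, question=rewritten)
).text
print(answer)
```

The trade-off is one extra model call per chat turn (timed above via final_request_start_time) in exchange for retrieval queries that carry the context of follow-up questions.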
app/database/models.py
CHANGED
|
@@ -25,6 +25,8 @@ class EmergencyItem(Base):
|
|
| 25 |
location = Column(String, nullable=True) # Will be converted to/from PostGIS POINT type
|
| 26 |
priority = Column(Integer, default=0)
|
| 27 |
is_active = Column(Boolean, default=True)
|
|
|
|
|
|
|
| 28 |
created_at = Column(DateTime, server_default=func.now())
|
| 29 |
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 30 |
|
|
@@ -39,11 +41,36 @@ class EventItem(Base):
|
|
| 39 |
date_start = Column(DateTime, nullable=False)
|
| 40 |
date_end = Column(DateTime, nullable=True)
|
| 41 |
price = Column(JSON, nullable=True)
|
|
|
|
| 42 |
is_active = Column(Boolean, default=True)
|
| 43 |
featured = Column(Boolean, default=False)
|
| 44 |
created_at = Column(DateTime, server_default=func.now())
|
| 45 |
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 46 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
class VectorDatabase(Base):
|
| 48 |
__tablename__ = "vector_database"
|
| 49 |
|
|
|
|
| 25 |
location = Column(String, nullable=True) # Will be converted to/from PostGIS POINT type
|
| 26 |
priority = Column(Integer, default=0)
|
| 27 |
is_active = Column(Boolean, default=True)
|
| 28 |
+
section = Column(String, nullable=True) # Section field (16.1, 16.2.1, 16.2.2, 16.3)
|
| 29 |
+
section_id = Column(Integer, nullable=True) # Numeric identifier for section
|
| 30 |
created_at = Column(DateTime, server_default=func.now())
|
| 31 |
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 32 |
|
|
|
|
| 41 |
date_start = Column(DateTime, nullable=False)
|
| 42 |
date_end = Column(DateTime, nullable=True)
|
| 43 |
price = Column(JSON, nullable=True)
|
| 44 |
+
url = Column(String, nullable=True)
|
| 45 |
is_active = Column(Boolean, default=True)
|
| 46 |
featured = Column(Boolean, default=False)
|
| 47 |
created_at = Column(DateTime, server_default=func.now())
|
| 48 |
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 49 |
|
| 50 |
+
class AboutPixity(Base):
|
| 51 |
+
__tablename__ = "about_pixity"
|
| 52 |
+
|
| 53 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 54 |
+
content = Column(Text, nullable=False)
|
| 55 |
+
created_at = Column(DateTime, server_default=func.now())
|
| 56 |
+
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 57 |
+
|
| 58 |
+
class SolanaSummit(Base):
|
| 59 |
+
__tablename__ = "solana_summit"
|
| 60 |
+
|
| 61 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 62 |
+
content = Column(Text, nullable=False)
|
| 63 |
+
created_at = Column(DateTime, server_default=func.now())
|
| 64 |
+
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 65 |
+
|
| 66 |
+
class DaNangBucketList(Base):
|
| 67 |
+
__tablename__ = "danang_bucket_list"
|
| 68 |
+
|
| 69 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 70 |
+
content = Column(Text, nullable=False)
|
| 71 |
+
created_at = Column(DateTime, server_default=func.now())
|
| 72 |
+
updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
|
| 73 |
+
|
| 74 |
class VectorDatabase(Base):
|
| 75 |
__tablename__ = "vector_database"
|
| 76 |
|
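The three new content tables (AboutPixity, SolanaSummit, DaNangBucketList) each hold a single free-text content column plus timestamps, so seeding or refreshing them is a plain ORM write. A rough sketch, assuming the SessionLocal factory from app/database/postgresql.py and that create_tables() has already run (the sample content is illustrative):

```python
from app.database.postgresql import SessionLocal
from app.database.models import AboutPixity

db = SessionLocal()
try:
    # Keep a single row of "about" content, updating it in place if it exists.
    row = db.query(AboutPixity).first()
    if row is None:
        db.add(AboutPixity(content="Pixity is a travel assistant for Da Nang, Vietnam."))
    else:
        row.content = "Pixity is a travel assistant for Da Nang, Vietnam."
    db.commit()
finally:
    db.close()
```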
app/database/mongodb.py
CHANGED
|
@@ -20,6 +20,10 @@ COLLECTION_NAME = os.getenv("COLLECTION_NAME", "session_chat")
|
|
| 20 |
# Set timeout for MongoDB connection
|
| 21 |
MONGODB_TIMEOUT = int(os.getenv("MONGODB_TIMEOUT", "5000")) # 5 seconds by default
|
| 22 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 23 |
# Create MongoDB connection with timeout
|
| 24 |
try:
|
| 25 |
client = MongoClient(MONGODB_URL, serverSelectionTimeoutMS=MONGODB_TIMEOUT)
|
|
@@ -82,6 +86,7 @@ def save_session(session_id, factor, action, first_name, last_name, message, use
|
|
| 82 |
}
|
| 83 |
result = session_collection.insert_one(session_data)
|
| 84 |
logger.info(f"Session saved with ID: {result.inserted_id}")
|
|
|
|
| 85 |
return {
|
| 86 |
"acknowledged": result.acknowledged,
|
| 87 |
"inserted_id": str(result.inserted_id),
|
|
@@ -94,15 +99,18 @@ def save_session(session_id, factor, action, first_name, last_name, message, use
|
|
| 94 |
def update_session_response(session_id, response):
|
| 95 |
"""Update a session with response"""
|
| 96 |
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 97 |
result = session_collection.update_one(
|
| 98 |
{"session_id": session_id},
|
| 99 |
{"$set": {"response": response}}
|
| 100 |
)
|
| 101 |
|
| 102 |
-
if result.matched_count == 0:
|
| 103 |
-
logger.warning(f"No session found with ID: {session_id}")
|
| 104 |
-
return False
|
| 105 |
-
|
| 106 |
logger.info(f"Session {session_id} updated with response")
|
| 107 |
return True
|
| 108 |
except Exception as e:
|
|
@@ -112,80 +120,61 @@ def update_session_response(session_id, response):
|
|
| 112 |
def get_recent_sessions(user_id, action, n=3):
|
| 113 |
"""Get n most recent sessions for a specific user and action"""
|
| 114 |
try:
|
| 115 |
-
|
|
|
|
| 116 |
session_collection.find(
|
| 117 |
{"user_id": user_id, "action": action},
|
| 118 |
{"_id": 0, "message": 1, "response": 1}
|
| 119 |
).sort("created_at_datetime", -1).limit(n)
|
| 120 |
)
|
| 121 |
-
except Exception as e:
|
| 122 |
-
logger.error(f"Error getting recent sessions: {e}")
|
| 123 |
-
return []
|
| 124 |
-
|
| 125 |
-
def get_user_history(user_id, n=3):
|
| 126 |
-
"""Get user history for a specific user"""
|
| 127 |
-
try:
|
| 128 |
-
# Find all messages of this user
|
| 129 |
-
user_messages = list(
|
| 130 |
-
session_collection.find(
|
| 131 |
-
{
|
| 132 |
-
"user_id": user_id,
|
| 133 |
-
"message": {"$exists": True, "$ne": None},
|
| 134 |
-
# Include all user messages regardless of action type
|
| 135 |
-
}
|
| 136 |
-
).sort("created_at_datetime", -1).limit(n * 2) # Get more to ensure we have enough pairs
|
| 137 |
-
)
|
| 138 |
-
|
| 139 |
-
# Group messages by session_id to find pairs
|
| 140 |
-
session_dict = {}
|
| 141 |
-
for msg in user_messages:
|
| 142 |
-
session_id = msg.get("session_id")
|
| 143 |
-
if session_id not in session_dict:
|
| 144 |
-
session_dict[session_id] = {}
|
| 145 |
-
|
| 146 |
-
if msg.get("factor", "").lower() == "user":
|
| 147 |
-
session_dict[session_id]["question"] = msg.get("message", "")
|
| 148 |
-
session_dict[session_id]["timestamp"] = msg.get("created_at_datetime")
|
| 149 |
-
elif msg.get("factor", "").lower() == "rag":
|
| 150 |
-
session_dict[session_id]["answer"] = msg.get("response", "")
|
| 151 |
-
|
| 152 |
-
# Build history from complete pairs only (with both question and answer)
|
| 153 |
-
history = []
|
| 154 |
-
for session_id, data in session_dict.items():
|
| 155 |
-
if "question" in data and "answer" in data and data.get("answer"):
|
| 156 |
-
history.append({
|
| 157 |
-
"question": data["question"],
|
| 158 |
-
"answer": data["answer"]
|
| 159 |
-
})
|
| 160 |
-
|
| 161 |
-
# Sort by timestamp and limit to n
|
| 162 |
-
history = sorted(history, key=lambda x: x.get("timestamp", 0), reverse=True)[:n]
|
| 163 |
|
| 164 |
-
logger.
|
| 165 |
-
return
|
| 166 |
except Exception as e:
|
| 167 |
-
logger.error(f"Error getting
|
| 168 |
return []
|
| 169 |
|
| 170 |
-
|
| 171 |
-
|
| 172 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 173 |
try:
|
| 174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
|
| 176 |
-
|
| 177 |
-
|
| 178 |
-
for
|
| 179 |
-
|
|
|
|
|
|
|
|
|
|
| 180 |
|
| 181 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 182 |
except Exception as e:
|
| 183 |
-
logger.error(f"
|
| 184 |
return ""
|
| 185 |
|
| 186 |
def get_request_history(user_id, n=3):
|
| 187 |
"""Get the most recent user requests to use as context for retrieval"""
|
| 188 |
try:
|
|
|
|
| 189 |
history = get_user_history(user_id, n)
|
| 190 |
|
| 191 |
# Just extract the questions for context
|
|
|
|
| 20 |
# Set timeout for MongoDB connection
|
| 21 |
MONGODB_TIMEOUT = int(os.getenv("MONGODB_TIMEOUT", "5000")) # 5 seconds by default
|
| 22 |
|
| 23 |
+
# Legacy cache settings - now only used for configuration purposes
|
| 24 |
+
HISTORY_CACHE_TTL = int(os.getenv("HISTORY_CACHE_TTL", "3600")) # 1 hour by default
|
| 25 |
+
HISTORY_QUEUE_SIZE = int(os.getenv("HISTORY_QUEUE_SIZE", "10")) # 10 items by default
|
| 26 |
+
|
| 27 |
# Create MongoDB connection with timeout
|
| 28 |
try:
|
| 29 |
client = MongoClient(MONGODB_URL, serverSelectionTimeoutMS=MONGODB_TIMEOUT)
|
|
|
|
| 86 |
}
|
| 87 |
result = session_collection.insert_one(session_data)
|
| 88 |
logger.info(f"Session saved with ID: {result.inserted_id}")
|
| 89 |
+
|
| 90 |
return {
|
| 91 |
"acknowledged": result.acknowledged,
|
| 92 |
"inserted_id": str(result.inserted_id),
|
|
|
|
| 99 |
def update_session_response(session_id, response):
|
| 100 |
"""Update a session with response"""
|
| 101 |
try:
|
| 102 |
+
# Look up the existing session
|
| 103 |
+
existing_session = session_collection.find_one({"session_id": session_id})
|
| 104 |
+
|
| 105 |
+
if not existing_session:
|
| 106 |
+
logger.warning(f"No session found with ID: {session_id}")
|
| 107 |
+
return False
|
| 108 |
+
|
| 109 |
result = session_collection.update_one(
|
| 110 |
{"session_id": session_id},
|
| 111 |
{"$set": {"response": response}}
|
| 112 |
)
|
| 113 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 114 |
logger.info(f"Session {session_id} updated with response")
|
| 115 |
return True
|
| 116 |
except Exception as e:
|
|
|
|
| 120 |
def get_recent_sessions(user_id, action, n=3):
|
| 121 |
"""Get n most recent sessions for a specific user and action"""
|
| 122 |
try:
|
| 123 |
+
# Query MongoDB directly
|
| 124 |
+
result = list(
|
| 125 |
session_collection.find(
|
| 126 |
{"user_id": user_id, "action": action},
|
| 127 |
{"_id": 0, "message": 1, "response": 1}
|
| 128 |
).sort("created_at_datetime", -1).limit(n)
|
| 129 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 130 |
|
| 131 |
+
logger.debug(f"Retrieved {len(result)} recent sessions for user {user_id}, action {action}")
|
| 132 |
+
return result
|
| 133 |
except Exception as e:
|
| 134 |
+
logger.error(f"Error getting recent sessions: {e}")
|
| 135 |
return []
|
| 136 |
|
| 137 |
+
def get_chat_history(user_id, n = 5) -> str:
|
| 138 |
+
"""
|
| 139 |
+
Fetch the chat history for user_id from MongoDB and join it into a string in the format:
|
| 140 |
+
|
| 141 |
+
User: ...
|
| 142 |
+
Bot: ...
|
| 143 |
+
User: ...
|
| 144 |
+
Bot: ...
|
| 145 |
+
"""
|
| 146 |
try:
|
| 147 |
+
# Query this user's documents, sorted by created_at in ascending order
|
| 148 |
+
# Get the n most recent documents first, then restore chronological order
|
| 149 |
+
docs = list(session_collection.find({"user_id": str(user_id)}).sort("created_at", -1).limit(n))
|
| 150 |
+
# Reverse the list to get chronological order (oldest to newest)
|
| 151 |
+
docs.reverse()
|
| 152 |
+
if not docs:
|
| 153 |
+
logger.info(f"Không tìm thấy dữ liệu cho user_id: {user_id}")
|
| 154 |
+
return ""
|
| 155 |
|
| 156 |
+
conversation_lines = []
|
| 157 |
+
# Process each document using the new record structure
|
| 158 |
+
for doc in docs:
|
| 159 |
+
factor = doc.get("factor", "").lower()
|
| 160 |
+
action = doc.get("action", "").lower()
|
| 161 |
+
message = doc.get("message", "")
|
| 162 |
+
response = doc.get("response", "")
|
| 163 |
|
| 164 |
+
if factor == "user" and action == "asking_freely":
|
| 165 |
+
conversation_lines.append(f"User: {message}")
|
| 166 |
+
conversation_lines.append(f"Bot: {response}")
|
| 167 |
+
|
| 168 |
+
# Join the lines into a single string
|
| 169 |
+
return "\n".join(conversation_lines)
|
| 170 |
except Exception as e:
|
| 171 |
+
logger.error(f"Lỗi khi lấy lịch sử chat cho user_id {user_id}: {e}")
|
| 172 |
return ""
|
| 173 |
|
| 174 |
def get_request_history(user_id, n=3):
|
| 175 |
"""Get the most recent user requests to use as context for retrieval"""
|
| 176 |
try:
|
| 177 |
+
# Fetch the history directly from MongoDB (via the modified get_user_history)
|
| 178 |
history = get_user_history(user_id, n)
|
| 179 |
|
| 180 |
# Just extract the questions for context
|
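The rewritten get_chat_history pulls the most recent session documents for a user and flattens each asking_freely record into a `User: ... / Bot: ...` transcript. A small self-contained sketch of that formatting step, using plain dicts in place of a live MongoDB cursor (the sample records are illustrative):

```python
# Documents shaped like the session_chat records queried above.
docs = [
    {"factor": "user", "action": "asking_freely",
     "message": "What should I eat in Da Nang?",
     "response": "Try mi quang and banh xeo."},
    {"factor": "user", "action": "asking_freely",
     "message": "Where can I find mi quang?",
     "response": "Mi Quang Ba Mua on Hai Phong street is popular."},
]

def format_history(docs) -> str:
    """Flatten question/answer records into a chat transcript."""
    lines = []
    for doc in docs:
        if doc.get("factor", "").lower() == "user" and doc.get("action", "").lower() == "asking_freely":
            lines.append(f"User: {doc.get('message', '')}")
            lines.append(f"Bot: {doc.get('response', '')}")
    return "\n".join(lines)

print(format_history(docs))
# User: What should I eat in Da Nang?
# Bot: Try mi quang and banh xeo.
# User: Where can I find mi quang?
# Bot: Mi Quang Ba Mua on Hai Phong street is popular.
```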
app/database/pinecone.py
CHANGED
|
@@ -6,7 +6,6 @@ from typing import Optional, List, Dict, Any, Union, Tuple
|
|
| 6 |
import time
|
| 7 |
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
| 8 |
import google.generativeai as genai
|
| 9 |
-
from app.utils.utils import cache
|
| 10 |
from langchain_core.retrievers import BaseRetriever
|
| 11 |
from langchain.callbacks.manager import Callbacks
|
| 12 |
from langchain_core.documents import Document
|
|
@@ -73,23 +72,39 @@ def init_pinecone():
|
|
| 73 |
if pc is None:
|
| 74 |
logger.info(f"Initializing Pinecone connection to index {PINECONE_INDEX_NAME}...")
|
| 75 |
|
|
|
|
|
|
|
|
|
|
|
| 76 |
# Initialize Pinecone client using the new API
|
| 77 |
pc = Pinecone(api_key=PINECONE_API_KEY)
|
| 78 |
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
return None
|
| 85 |
|
| 86 |
-
# Get existing index
|
| 87 |
-
index = pc.Index(PINECONE_INDEX_NAME)
|
| 88 |
-
logger.info(f"Pinecone connection established to index {PINECONE_INDEX_NAME}")
|
| 89 |
-
|
| 90 |
return index
|
|
|
|
|
|
|
|
|
|
| 91 |
except Exception as e:
|
| 92 |
-
logger.error(f"
|
| 93 |
return None
|
| 94 |
|
| 95 |
# Get Pinecone index singleton
|
|
@@ -184,7 +199,7 @@ async def search_vectors(
|
|
| 184 |
limit_k: int = DEFAULT_LIMIT_K,
|
| 185 |
similarity_metric: str = DEFAULT_SIMILARITY_METRIC,
|
| 186 |
similarity_threshold: float = DEFAULT_SIMILARITY_THRESHOLD,
|
| 187 |
-
namespace: str = "",
|
| 188 |
filter: Optional[Dict] = None
|
| 189 |
) -> Dict:
|
| 190 |
"""
|
|
@@ -211,23 +226,13 @@ async def search_vectors(
|
|
| 211 |
if limit_k < top_k:
|
| 212 |
logger.warning(f"limit_k ({limit_k}) must be greater than or equal to top_k ({top_k}). Setting limit_k to {top_k}")
|
| 213 |
limit_k = top_k
|
| 214 |
-
|
| 215 |
-
# Create cache key from parameters
|
| 216 |
-
vector_hash = hash(str(query_vector))
|
| 217 |
-
cache_key = f"pinecone_search:{vector_hash}:{limit_k}:{similarity_metric}:{similarity_threshold}:{namespace}:{filter}"
|
| 218 |
-
|
| 219 |
-
# Check cache first
|
| 220 |
-
cached_result = cache.get(cache_key)
|
| 221 |
-
if cached_result is not None:
|
| 222 |
-
logger.info("Returning cached Pinecone search results")
|
| 223 |
-
return cached_result
|
| 224 |
|
| 225 |
-
#
|
| 226 |
pinecone_index = get_pinecone_index()
|
| 227 |
if pinecone_index is None:
|
| 228 |
logger.error("Failed to get Pinecone index for search")
|
| 229 |
return None
|
| 230 |
-
|
| 231 |
# Query Pinecone with the provided metric and higher limit_k to allow for threshold filtering
|
| 232 |
results = pinecone_index.query(
|
| 233 |
vector=query_vector,
|
|
@@ -250,10 +255,7 @@ async def search_vectors(
|
|
| 250 |
|
| 251 |
# Log search result metrics
|
| 252 |
match_count = len(filtered_matches)
|
| 253 |
-
logger.info(f"Pinecone search returned {match_count} matches after threshold filtering (metric: {similarity_metric}, threshold: {similarity_threshold})")
|
| 254 |
-
|
| 255 |
-
# Store result in cache with 5 minute TTL
|
| 256 |
-
cache.set(cache_key, results, ttl=300)
|
| 257 |
|
| 258 |
return results
|
| 259 |
except Exception as e:
|
|
@@ -261,7 +263,7 @@ async def search_vectors(
|
|
| 261 |
return None
|
| 262 |
|
| 263 |
# Upsert vectors to Pinecone
|
| 264 |
-
async def upsert_vectors(vectors, namespace=""):
|
| 265 |
"""Upsert vectors to Pinecone index"""
|
| 266 |
try:
|
| 267 |
pinecone_index = get_pinecone_index()
|
|
@@ -284,7 +286,7 @@ async def upsert_vectors(vectors, namespace=""):
|
|
| 284 |
return None
|
| 285 |
|
| 286 |
# Delete vectors from Pinecone
|
| 287 |
-
async def delete_vectors(ids, namespace=""):
|
| 288 |
"""Delete vectors from Pinecone index"""
|
| 289 |
try:
|
| 290 |
pinecone_index = get_pinecone_index()
|
|
@@ -304,7 +306,7 @@ async def delete_vectors(ids, namespace=""):
|
|
| 304 |
return False
|
| 305 |
|
| 306 |
# Fetch vector metadata from Pinecone
|
| 307 |
-
async def fetch_metadata(ids, namespace=""):
|
| 308 |
"""Fetch metadata for specific vector IDs"""
|
| 309 |
try:
|
| 310 |
pinecone_index = get_pinecone_index()
|
|
@@ -336,7 +338,8 @@ class ThresholdRetriever(BaseRetriever):
|
|
| 336 |
limit_k: int = Field(default=DEFAULT_LIMIT_K, description="Maximum number of results to retrieve from Pinecone")
|
| 337 |
similarity_metric: str = Field(default=DEFAULT_SIMILARITY_METRIC, description="Similarity metric to use")
|
| 338 |
similarity_threshold: float = Field(default=DEFAULT_SIMILARITY_THRESHOLD, description="Threshold for similarity")
|
| 339 |
-
|
|
|
|
| 340 |
class Config:
|
| 341 |
"""Configuration for this pydantic object."""
|
| 342 |
arbitrary_types_allowed = True
|
|
@@ -347,7 +350,7 @@ class ThresholdRetriever(BaseRetriever):
|
|
| 347 |
limit_k: int = DEFAULT_LIMIT_K,
|
| 348 |
similarity_metric: str = DEFAULT_SIMILARITY_METRIC,
|
| 349 |
similarity_threshold: float = DEFAULT_SIMILARITY_THRESHOLD,
|
| 350 |
-
namespace: str = "",
|
| 351 |
filter: Optional[Dict] = None
|
| 352 |
) -> Dict:
|
| 353 |
"""Synchronous wrapper for search_vectors"""
|
|
@@ -440,8 +443,8 @@ class ThresholdRetriever(BaseRetriever):
|
|
| 440 |
limit_k=self.limit_k,
|
| 441 |
similarity_metric=self.similarity_metric,
|
| 442 |
similarity_threshold=self.similarity_threshold,
|
| 443 |
-
namespace=
|
| 444 |
-
filter=self.search_kwargs.get("filter", None)
|
| 445 |
))
|
| 446 |
|
| 447 |
# Run the async function in a thread
|
|
@@ -455,8 +458,8 @@ class ThresholdRetriever(BaseRetriever):
|
|
| 455 |
limit_k=self.limit_k,
|
| 456 |
similarity_metric=self.similarity_metric,
|
| 457 |
similarity_threshold=self.similarity_threshold,
|
| 458 |
-
namespace=
|
| 459 |
-
filter=self.search_kwargs.get("filter", None)
|
| 460 |
))
|
| 461 |
|
| 462 |
# Convert to documents
|
|
@@ -517,14 +520,6 @@ def get_chain(
|
|
| 517 |
if _retriever_instance is not None:
|
| 518 |
return _retriever_instance
|
| 519 |
|
| 520 |
-
# Check if chain has been cached
|
| 521 |
-
cache_key = f"pinecone_retriever:{index_name}:{namespace}:{top_k}:{limit_k}:{similarity_metric}:{similarity_threshold}"
|
| 522 |
-
cached_retriever = cache.get(cache_key)
|
| 523 |
-
if cached_retriever is not None:
|
| 524 |
-
_retriever_instance = cached_retriever
|
| 525 |
-
logger.info("Retrieved cached Pinecone retriever")
|
| 526 |
-
return _retriever_instance
|
| 527 |
-
|
| 528 |
start_time = time.time()
|
| 529 |
logger.info("Initializing new retriever chain with threshold-based filtering")
|
| 530 |
|
|
@@ -572,9 +567,6 @@ def get_chain(
|
|
| 572 |
|
| 573 |
logger.info(f"Pinecone retriever initialized in {time.time() - start_time:.2f} seconds")
|
| 574 |
|
| 575 |
-
# Cache the retriever with longer TTL (1 hour) since it rarely changes
|
| 576 |
-
cache.set(cache_key, _retriever_instance, ttl=3600)
|
| 577 |
-
|
| 578 |
return _retriever_instance
|
| 579 |
except Exception as e:
|
| 580 |
logger.error(f"Error creating retrieval chain: {e}")
|
|
|
|
| 6 |
import time
|
| 7 |
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
| 8 |
import google.generativeai as genai
|
|
|
|
| 9 |
from langchain_core.retrievers import BaseRetriever
|
| 10 |
from langchain.callbacks.manager import Callbacks
|
| 11 |
from langchain_core.documents import Document
|
|
|
|
| 72 |
if pc is None:
|
| 73 |
logger.info(f"Initializing Pinecone connection to index {PINECONE_INDEX_NAME}...")
|
| 74 |
|
| 75 |
+
# Check if API key and index name are set
|
| 76 |
+
if not PINECONE_API_KEY:
|
| 77 |
+
logger.error("PINECONE_API_KEY is not set in environment variables")
|
| 78 |
+
return None
|
| 79 |
+
|
| 80 |
+
if not PINECONE_INDEX_NAME:
|
| 81 |
+
logger.error("PINECONE_INDEX_NAME is not set in environment variables")
|
| 82 |
+
return None
|
| 83 |
+
|
| 84 |
# Initialize Pinecone client using the new API
|
| 85 |
pc = Pinecone(api_key=PINECONE_API_KEY)
|
| 86 |
|
| 87 |
+
try:
|
| 88 |
+
# Check if index exists
|
| 89 |
+
index_list = pc.list_indexes()
|
| 90 |
+
|
| 91 |
+
if not hasattr(index_list, 'names') or PINECONE_INDEX_NAME not in index_list.names():
|
| 92 |
+
logger.error(f"Index {PINECONE_INDEX_NAME} does not exist in Pinecone")
|
| 93 |
+
return None
|
| 94 |
+
|
| 95 |
+
# Get existing index
|
| 96 |
+
index = pc.Index(PINECONE_INDEX_NAME)
|
| 97 |
+
logger.info(f"Pinecone connection established to index {PINECONE_INDEX_NAME}")
|
| 98 |
+
except Exception as connection_error:
|
| 99 |
+
logger.error(f"Error connecting to Pinecone index: {connection_error}")
|
| 100 |
return None
|
| 101 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 102 |
return index
|
| 103 |
+
except ImportError as e:
|
| 104 |
+
logger.error(f"Required package for Pinecone is missing: {e}")
|
| 105 |
+
return None
|
| 106 |
except Exception as e:
|
| 107 |
+
logger.error(f"Unexpected error initializing Pinecone: {e}")
|
| 108 |
return None
|
| 109 |
|
| 110 |
# Get Pinecone index singleton
|
|
|
|
| 199 |
limit_k: int = DEFAULT_LIMIT_K,
|
| 200 |
similarity_metric: str = DEFAULT_SIMILARITY_METRIC,
|
| 201 |
similarity_threshold: float = DEFAULT_SIMILARITY_THRESHOLD,
|
| 202 |
+
namespace: str = "Default",
|
| 203 |
filter: Optional[Dict] = None
|
| 204 |
) -> Dict:
|
| 205 |
"""
|
|
|
|
| 226 |
if limit_k < top_k:
|
| 227 |
logger.warning(f"limit_k ({limit_k}) must be greater than or equal to top_k ({top_k}). Setting limit_k to {top_k}")
|
| 228 |
limit_k = top_k
|
|
|
|
|
|
|
|
|
|
|
| 229 |
|
| 230 |
+
# Perform search directly without cache
|
| 231 |
pinecone_index = get_pinecone_index()
|
| 232 |
if pinecone_index is None:
|
| 233 |
logger.error("Failed to get Pinecone index for search")
|
| 234 |
return None
|
| 235 |
+
|
| 236 |
# Query Pinecone with the provided metric and higher limit_k to allow for threshold filtering
|
| 237 |
results = pinecone_index.query(
|
| 238 |
vector=query_vector,
|
|
|
|
| 255 |
|
| 256 |
# Log search result metrics
|
| 257 |
match_count = len(filtered_matches)
|
| 258 |
+
logger.info(f"Pinecone search returned {match_count} matches after threshold filtering (metric: {similarity_metric}, threshold: {similarity_threshold}, namespace: {namespace})")
|
|
|
|
|
|
|
|
|
|
| 259 |
|
| 260 |
return results
|
| 261 |
except Exception as e:
|
|
|
|
| 263 |
return None
|
| 264 |
|
| 265 |
# Upsert vectors to Pinecone
|
| 266 |
+
async def upsert_vectors(vectors, namespace="Default"):
|
| 267 |
"""Upsert vectors to Pinecone index"""
|
| 268 |
try:
|
| 269 |
pinecone_index = get_pinecone_index()
|
|
|
|
| 286 |
return None
|
| 287 |
|
| 288 |
# Delete vectors from Pinecone
|
| 289 |
+
async def delete_vectors(ids, namespace="Default"):
|
| 290 |
"""Delete vectors from Pinecone index"""
|
| 291 |
try:
|
| 292 |
pinecone_index = get_pinecone_index()
|
|
|
|
| 306 |
return False
|
| 307 |
|
| 308 |
# Fetch vector metadata from Pinecone
|
| 309 |
+
async def fetch_metadata(ids, namespace="Default"):
|
| 310 |
"""Fetch metadata for specific vector IDs"""
|
| 311 |
try:
|
| 312 |
pinecone_index = get_pinecone_index()
|
|
|
|
| 338 |
limit_k: int = Field(default=DEFAULT_LIMIT_K, description="Maximum number of results to retrieve from Pinecone")
|
| 339 |
similarity_metric: str = Field(default=DEFAULT_SIMILARITY_METRIC, description="Similarity metric to use")
|
| 340 |
similarity_threshold: float = Field(default=DEFAULT_SIMILARITY_THRESHOLD, description="Threshold for similarity")
|
| 341 |
+
namespace: str = "Default"
|
| 342 |
+
|
| 343 |
class Config:
|
| 344 |
"""Configuration for this pydantic object."""
|
| 345 |
arbitrary_types_allowed = True
|
|
|
|
| 350 |
limit_k: int = DEFAULT_LIMIT_K,
|
| 351 |
similarity_metric: str = DEFAULT_SIMILARITY_METRIC,
|
| 352 |
similarity_threshold: float = DEFAULT_SIMILARITY_THRESHOLD,
|
| 353 |
+
namespace: str = "Default",
|
| 354 |
filter: Optional[Dict] = None
|
| 355 |
) -> Dict:
|
| 356 |
"""Synchronous wrapper for search_vectors"""
|
|
|
|
| 443 |
limit_k=self.limit_k,
|
| 444 |
similarity_metric=self.similarity_metric,
|
| 445 |
similarity_threshold=self.similarity_threshold,
|
| 446 |
+
namespace=self.namespace,
|
| 447 |
+
# filter=self.search_kwargs.get("filter", None)
|
| 448 |
))
|
| 449 |
|
| 450 |
# Run the async function in a thread
|
|
|
|
| 458 |
limit_k=self.limit_k,
|
| 459 |
similarity_metric=self.similarity_metric,
|
| 460 |
similarity_threshold=self.similarity_threshold,
|
| 461 |
+
namespace=self.namespace,
|
| 462 |
+
# filter=self.search_kwargs.get("filter", None)
|
| 463 |
))
|
| 464 |
|
| 465 |
# Convert to documents
|
|
|
|
| 520 |
if _retriever_instance is not None:
|
| 521 |
return _retriever_instance
|
| 522 |
|
|
|
|
|
|
|
|
|
|
| 523 |
start_time = time.time()
|
| 524 |
logger.info("Initializing new retriever chain with threshold-based filtering")
|
| 525 |
|
|
|
|
| 567 |
|
| 568 |
logger.info(f"Pinecone retriever initialized in {time.time() - start_time:.2f} seconds")
|
| 569 |
|
|
|
|
|
|
|
|
|
|
| 570 |
return _retriever_instance
|
| 571 |
except Exception as e:
|
| 572 |
logger.error(f"Error creating retrieval chain: {e}")
|
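A recurring change in pinecone.py is that every namespace argument now defaults to "Default" instead of an empty string, and init_pinecone validates the API key and index name before connecting. A rough sketch of that connect-then-query pattern with the current Pinecone client (the 768-dimension placeholder vector and environment variable names mirror the code above but are assumptions for the example):

```python
import os
from pinecone import Pinecone

api_key = os.getenv("PINECONE_API_KEY")
index_name = os.getenv("PINECONE_INDEX_NAME", "testbot768")

# Fail fast if the connection settings are missing or the index does not exist.
if not api_key or not index_name:
    raise RuntimeError("Pinecone is not configured")
pc = Pinecone(api_key=api_key)
if index_name not in pc.list_indexes().names():
    raise RuntimeError(f"Index {index_name} does not exist")

index = pc.Index(index_name)

# All reads and writes now target the "Default" namespace explicitly.
results = index.query(
    vector=[0.0] * 768,   # placeholder embedding
    top_k=5,
    namespace="Default",
    include_metadata=True,
)
print(results)
```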
app/database/postgresql.py
CHANGED
|
@@ -6,7 +6,7 @@ from sqlalchemy.exc import SQLAlchemyError, OperationalError
|
|
| 6 |
from dotenv import load_dotenv
|
| 7 |
import logging
|
| 8 |
|
| 9 |
-
#
|
| 10 |
logger = logging.getLogger(__name__)
|
| 11 |
|
| 12 |
# Load environment variables
|
|
@@ -24,66 +24,76 @@ else:
|
|
| 24 |
|
| 25 |
if not DATABASE_URL:
|
| 26 |
logger.error("No database URL configured. Please set AIVEN_DB_URL environment variable.")
|
| 27 |
-
DATABASE_URL = "postgresql://localhost/test" # Fallback
|
| 28 |
|
| 29 |
-
# Create SQLAlchemy engine
|
| 30 |
try:
|
| 31 |
engine = create_engine(
|
| 32 |
DATABASE_URL,
|
| 33 |
-
pool_pre_ping=True,
|
| 34 |
-
pool_recycle=300,
|
| 35 |
-
pool_size=
|
| 36 |
-
max_overflow=
|
|
|
|
| 37 |
connect_args={
|
| 38 |
-
"connect_timeout":
|
| 39 |
-
"keepalives": 1,
|
| 40 |
-
"keepalives_idle": 30,
|
| 41 |
-
"keepalives_interval": 10, #
|
| 42 |
-
"keepalives_count": 5 #
|
|
|
|
| 43 |
},
|
| 44 |
-
#
|
| 45 |
-
isolation_level="READ COMMITTED", #
|
| 46 |
-
echo=False,
|
| 47 |
-
echo_pool=False
|
|
|
|
|
|
|
|
|
| 48 |
)
|
| 49 |
-
logger.info("PostgreSQL engine initialized")
|
| 50 |
except Exception as e:
|
| 51 |
logger.error(f"Failed to initialize PostgreSQL engine: {e}")
|
| 52 |
-
#
|
| 53 |
|
| 54 |
-
# Create session factory
|
| 55 |
SessionLocal = sessionmaker(
|
| 56 |
autocommit=False,
|
| 57 |
autoflush=False,
|
| 58 |
bind=engine,
|
| 59 |
-
expire_on_commit=False #
|
| 60 |
)
|
| 61 |
|
| 62 |
# Base class for declarative models - use sqlalchemy.orm for SQLAlchemy 2.0 compatibility
|
| 63 |
from sqlalchemy.orm import declarative_base
|
| 64 |
Base = declarative_base()
|
| 65 |
|
| 66 |
-
#
|
| 67 |
def check_db_connection():
|
| 68 |
-
"""
|
| 69 |
try:
|
| 70 |
-
#
|
| 71 |
with engine.connect() as connection:
|
| 72 |
-
connection.execute(text("SELECT 1"))
|
| 73 |
-
logger.info("PostgreSQL connection
|
| 74 |
return True
|
| 75 |
except OperationalError as e:
|
| 76 |
logger.error(f"PostgreSQL connection failed: {e}")
|
| 77 |
return False
|
| 78 |
except Exception as e:
|
| 79 |
-
logger.error(f"Unknown error
|
| 80 |
return False
|
| 81 |
|
| 82 |
-
# Dependency to get DB session
|
| 83 |
def get_db():
|
| 84 |
"""Get database session dependency for FastAPI endpoints"""
|
| 85 |
db = SessionLocal()
|
| 86 |
try:
|
|
|
|
|
|
|
| 87 |
yield db
|
| 88 |
except SQLAlchemyError as e:
|
| 89 |
logger.error(f"Database session error: {e}")
|
|
@@ -92,13 +102,92 @@ def get_db():
|
|
| 92 |
finally:
|
| 93 |
db.close()
|
| 94 |
|
| 95 |
-
#
|
| 96 |
def create_tables():
|
| 97 |
-
"""
|
| 98 |
try:
|
| 99 |
Base.metadata.create_all(bind=engine)
|
| 100 |
logger.info("Database tables created or already exist")
|
| 101 |
return True
|
| 102 |
except SQLAlchemyError as e:
|
| 103 |
-
logger.error(f"Failed to create database tables: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
| 104 |
return False
|
|
|
|
| 6 |
from dotenv import load_dotenv
|
| 7 |
import logging
|
| 8 |
|
| 9 |
+
# Configure logging
|
| 10 |
logger = logging.getLogger(__name__)
|
| 11 |
|
| 12 |
# Load environment variables
|
|
|
|
| 24 |
|
| 25 |
if not DATABASE_URL:
|
| 26 |
logger.error("No database URL configured. Please set AIVEN_DB_URL environment variable.")
|
| 27 |
+
DATABASE_URL = "postgresql://localhost/test" # Fallback to avoid crash on startup
|
| 28 |
|
| 29 |
+
# Create SQLAlchemy engine with optimized settings
|
| 30 |
try:
|
| 31 |
engine = create_engine(
|
| 32 |
DATABASE_URL,
|
| 33 |
+
pool_pre_ping=True, # Enable connection health checks
|
| 34 |
+
pool_recycle=300, # Recycle connections every 5 minutes
|
| 35 |
+
pool_size=20, # Increase pool size for more concurrent connections
|
| 36 |
+
max_overflow=30, # Allow more overflow connections
|
| 37 |
+
pool_timeout=30, # Timeout for getting connection from pool
|
| 38 |
connect_args={
|
| 39 |
+
"connect_timeout": 5, # Connection timeout in seconds
|
| 40 |
+
"keepalives": 1, # Enable TCP keepalives
|
| 41 |
+
"keepalives_idle": 30, # Time before sending keepalives
|
| 42 |
+
"keepalives_interval": 10, # Time between keepalives
|
| 43 |
+
"keepalives_count": 5, # Number of keepalive probes
|
| 44 |
+
"application_name": "pixagent_api" # Identify app in PostgreSQL logs
|
| 45 |
},
|
| 46 |
+
# Performance optimizations
|
| 47 |
+
isolation_level="READ COMMITTED", # Lower isolation level for better performance
|
| 48 |
+
echo=False, # Disable SQL echo to reduce overhead
|
| 49 |
+
echo_pool=False, # Disable pool logging
|
| 50 |
+
future=True, # Use SQLAlchemy 2.0 features
|
| 51 |
+
# Execution options for common queries
|
| 52 |
+
execution_options={
|
| 53 |
+
"compiled_cache": {}, # Use an empty dict for compiled query caching
|
| 54 |
+
"logging_token": "SQL", # Tag for query logging
|
| 55 |
+
}
|
| 56 |
)
|
| 57 |
+
logger.info("PostgreSQL engine initialized with optimized settings")
|
| 58 |
except Exception as e:
|
| 59 |
logger.error(f"Failed to initialize PostgreSQL engine: {e}")
|
| 60 |
+
# Don't raise exception to avoid crash on startup
|
| 61 |
|
| 62 |
+
# Create optimized session factory
|
| 63 |
SessionLocal = sessionmaker(
|
| 64 |
autocommit=False,
|
| 65 |
autoflush=False,
|
| 66 |
bind=engine,
|
| 67 |
+
expire_on_commit=False # Prevent automatic reloading after commit
|
| 68 |
)
|
| 69 |
|
| 70 |
# Base class for declarative models - use sqlalchemy.orm for SQLAlchemy 2.0 compatibility
|
| 71 |
from sqlalchemy.orm import declarative_base
|
| 72 |
Base = declarative_base()
|
| 73 |
|
| 74 |
+
# Check PostgreSQL connection
|
| 75 |
def check_db_connection():
|
| 76 |
+
"""Check PostgreSQL connection status"""
|
| 77 |
try:
|
| 78 |
+
# Simple query to verify connection
|
| 79 |
with engine.connect() as connection:
|
| 80 |
+
connection.execute(text("SELECT 1")).fetchone()
|
| 81 |
+
logger.info("PostgreSQL connection successful")
|
| 82 |
return True
|
| 83 |
except OperationalError as e:
|
| 84 |
logger.error(f"PostgreSQL connection failed: {e}")
|
| 85 |
return False
|
| 86 |
except Exception as e:
|
| 87 |
+
logger.error(f"Unknown error checking PostgreSQL connection: {e}")
|
| 88 |
return False
|
| 89 |
|
| 90 |
+
# Dependency to get DB session with improved error handling
|
| 91 |
def get_db():
|
| 92 |
"""Get database session dependency for FastAPI endpoints"""
|
| 93 |
db = SessionLocal()
|
| 94 |
try:
|
| 95 |
+
# Test connection is valid before returning
|
| 96 |
+
db.execute(text("SELECT 1")).fetchone()
|
| 97 |
yield db
|
| 98 |
except SQLAlchemyError as e:
|
| 99 |
logger.error(f"Database session error: {e}")
|
|
|
|
| 102 |
finally:
|
| 103 |
db.close()
|
| 104 |
|
| 105 |
+
# Create tables in database if they don't exist
|
| 106 |
def create_tables():
|
| 107 |
+
"""Create tables in database"""
|
| 108 |
try:
|
| 109 |
Base.metadata.create_all(bind=engine)
|
| 110 |
logger.info("Database tables created or already exist")
|
| 111 |
return True
|
| 112 |
except SQLAlchemyError as e:
|
| 113 |
+
logger.error(f"Failed to create database tables (SQLAlchemy error): {e}")
|
| 114 |
+
return False
|
| 115 |
+
except Exception as e:
|
| 116 |
+
logger.error(f"Failed to create database tables (unexpected error): {e}")
|
| 117 |
+
return False
|
| 118 |
+
|
| 119 |
+
# Function to create indexes for better performance
|
| 120 |
+
def create_indexes():
|
| 121 |
+
"""Create indexes for better query performance"""
|
| 122 |
+
try:
|
| 123 |
+
with engine.connect() as conn:
|
| 124 |
+
try:
|
| 125 |
+
# Index for featured events - use try-except to handle if index already exists
|
| 126 |
+
conn.execute(text("""
|
| 127 |
+
CREATE INDEX idx_event_featured
|
| 128 |
+
ON event_item(featured)
|
| 129 |
+
"""))
|
| 130 |
+
except SQLAlchemyError:
|
| 131 |
+
logger.info("Index idx_event_featured already exists")
|
| 132 |
+
|
| 133 |
+
try:
|
| 134 |
+
# Index for active events
|
| 135 |
+
conn.execute(text("""
|
| 136 |
+
CREATE INDEX idx_event_active
|
| 137 |
+
ON event_item(is_active)
|
| 138 |
+
"""))
|
| 139 |
+
except SQLAlchemyError:
|
| 140 |
+
logger.info("Index idx_event_active already exists")
|
| 141 |
+
|
| 142 |
+
try:
|
| 143 |
+
# Index for date filtering
|
| 144 |
+
conn.execute(text("""
|
| 145 |
+
CREATE INDEX idx_event_date_start
|
| 146 |
+
ON event_item(date_start)
|
| 147 |
+
"""))
|
| 148 |
+
except SQLAlchemyError:
|
| 149 |
+
logger.info("Index idx_event_date_start already exists")
|
| 150 |
+
|
| 151 |
+
try:
|
| 152 |
+
# Composite index for combined filtering
|
| 153 |
+
conn.execute(text("""
|
| 154 |
+
CREATE INDEX idx_event_featured_active
|
| 155 |
+
ON event_item(featured, is_active)
|
| 156 |
+
"""))
|
| 157 |
+
except SQLAlchemyError:
|
| 158 |
+
logger.info("Index idx_event_featured_active already exists")
|
| 159 |
+
|
| 160 |
+
# Indexes for FAQ and Emergency tables
|
| 161 |
+
try:
|
| 162 |
+
# FAQ active flag index
|
| 163 |
+
conn.execute(text("""
|
| 164 |
+
CREATE INDEX idx_faq_active
|
| 165 |
+
ON faq_item(is_active)
|
| 166 |
+
"""))
|
| 167 |
+
except SQLAlchemyError:
|
| 168 |
+
logger.info("Index idx_faq_active already exists")
|
| 169 |
+
|
| 170 |
+
try:
|
| 171 |
+
# Emergency contact active flag and priority indexes
|
| 172 |
+
conn.execute(text("""
|
| 173 |
+
CREATE INDEX idx_emergency_active
|
| 174 |
+
ON emergency_item(is_active)
|
| 175 |
+
"""))
|
| 176 |
+
except SQLAlchemyError:
|
| 177 |
+
logger.info("Index idx_emergency_active already exists")
|
| 178 |
+
|
| 179 |
+
try:
|
| 180 |
+
conn.execute(text("""
|
| 181 |
+
CREATE INDEX idx_emergency_priority
|
| 182 |
+
ON emergency_item(priority)
|
| 183 |
+
"""))
|
| 184 |
+
except SQLAlchemyError:
|
| 185 |
+
logger.info("Index idx_emergency_priority already exists")
|
| 186 |
+
|
| 187 |
+
conn.commit()
|
| 188 |
+
|
| 189 |
+
logger.info("Database indexes created or verified")
|
| 190 |
+
return True
|
| 191 |
+
except SQLAlchemyError as e:
|
| 192 |
+
logger.error(f"Failed to create indexes: {e}")
|
| 193 |
return False
|
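create_indexes() above relies on catching SQLAlchemyError when an index already exists. On PostgreSQL the same effect can be achieved with CREATE INDEX IF NOT EXISTS, which keeps the function idempotent without a try/except per index. A short alternative sketch (not the repository's code; the connection URL is a placeholder):

```python
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://localhost/pixagent")  # placeholder URL

INDEX_STATEMENTS = [
    "CREATE INDEX IF NOT EXISTS idx_event_featured ON event_item (featured)",
    "CREATE INDEX IF NOT EXISTS idx_event_active ON event_item (is_active)",
    "CREATE INDEX IF NOT EXISTS idx_event_date_start ON event_item (date_start)",
    "CREATE INDEX IF NOT EXISTS idx_event_featured_active ON event_item (featured, is_active)",
    "CREATE INDEX IF NOT EXISTS idx_faq_active ON faq_item (is_active)",
    "CREATE INDEX IF NOT EXISTS idx_emergency_active ON emergency_item (is_active)",
    "CREATE INDEX IF NOT EXISTS idx_emergency_priority ON emergency_item (priority)",
]

def create_indexes_idempotent() -> bool:
    """Create the same indexes as create_indexes(), skipping any that already exist."""
    try:
        with engine.begin() as conn:  # engine.begin() commits on success
            for statement in INDEX_STATEMENTS:
                conn.execute(text(statement))
        return True
    except Exception as exc:
        print(f"Failed to create indexes: {exc}")
        return False
```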
app/models/pdf_models.py
ADDED
|
@@ -0,0 +1,51 @@
from pydantic import BaseModel, Field
from typing import Optional, List, Dict, Any

class PDFUploadRequest(BaseModel):
    """Request model for PDF upload"""
    namespace: Optional[str] = Field("Default", description="Namespace in Pinecone")
    index_name: Optional[str] = Field("testbot768", description="Pinecone index name")
    title: Optional[str] = Field(None, description="Title of the document")
    description: Optional[str] = Field(None, description="Description of the document")

class PDFResponse(BaseModel):
    """Response model for PDF processing"""
    success: bool = Field(..., description="Whether processing succeeded")
    document_id: Optional[str] = Field(None, description="ID of the document")
    chunks_processed: Optional[int] = Field(None, description="Number of chunks processed")
    total_text_length: Optional[int] = Field(None, description="Total length of the extracted text")
    error: Optional[str] = Field(None, description="Error message, if any")

    class Config:
        schema_extra = {
            "example": {
                "success": True,
                "document_id": "550e8400-e29b-41d4-a716-446655440000",
                "chunks_processed": 25,
                "total_text_length": 50000
            }
        }

class DeleteDocumentRequest(BaseModel):
    """Request model for deleting a document"""
    document_id: str = Field(..., description="ID of the document to delete")
    namespace: Optional[str] = Field("Default", description="Namespace in Pinecone")
    index_name: Optional[str] = Field("testbot768", description="Pinecone index name")

class DocumentsListResponse(BaseModel):
    """Response model for listing documents"""
    success: bool = Field(..., description="Whether the request succeeded")
    total_vectors: Optional[int] = Field(None, description="Total number of vectors in the index")
    namespace: Optional[str] = Field(None, description="Namespace in use")
    index_name: Optional[str] = Field(None, description="Index name in use")
    error: Optional[str] = Field(None, description="Error message, if any")

    class Config:
        schema_extra = {
            "example": {
                "success": True,
                "total_vectors": 5000,
                "namespace": "Default",
                "index_name": "testbot768"
            }
        }
|
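The new Pydantic models above give the PDF endpoints typed request and response payloads with sensible defaults. A quick usage sketch (values are illustrative; it assumes Pydantic v1, which the Config.schema_extra usage suggests):

```python
from app.models.pdf_models import PDFUploadRequest, PDFResponse

req = PDFUploadRequest(title="Da Nang city guide", description="Overview of districts and beaches")
print(req.namespace, req.index_name)  # defaults: "Default" "testbot768"

resp = PDFResponse(
    success=True,
    document_id="550e8400-e29b-41d4-a716-446655440000",
    chunks_processed=25,
    total_text_length=50000,
)
print(resp.json())
```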
app/utils/cache.py
ADDED
|
@@ -0,0 +1,271 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os
import time
import threading
import logging
from typing import Dict, Any, Optional, Tuple, List, Callable, Generic, TypeVar, Union
from datetime import datetime
from dotenv import load_dotenv
import json

# Set up logging
logger = logging.getLogger(__name__)

# Load environment variables
load_dotenv()

# Cache configuration from environment variables
DEFAULT_CACHE_TTL = int(os.getenv("CACHE_TTL_SECONDS", "300"))  # Default: 5 minutes
DEFAULT_CACHE_CLEANUP_INTERVAL = int(os.getenv("CACHE_CLEANUP_INTERVAL", "60"))  # Default: 1 minute
DEFAULT_CACHE_MAX_SIZE = int(os.getenv("CACHE_MAX_SIZE", "1000"))  # Default: 1000 items
DEFAULT_HISTORY_QUEUE_SIZE = int(os.getenv("HISTORY_QUEUE_SIZE", "10"))  # Default queue size: 10
DEFAULT_HISTORY_CACHE_TTL = int(os.getenv("HISTORY_CACHE_TTL", "3600"))  # Default: 1 hour

# Generic type so the cache can hold values of any type
T = TypeVar('T')

# Structure for a single cache entry
class CacheItem(Generic[T]):
    def __init__(self, value: T, ttl: int = DEFAULT_CACHE_TTL):
        self.value = value
        self.expire_at = time.time() + ttl
        self.last_accessed = time.time()

    def is_expired(self) -> bool:
        """Check whether the item has expired"""
        return time.time() > self.expire_at

    def touch(self) -> None:
        """Update the last-accessed timestamp"""
        self.last_accessed = time.time()

    def extend(self, ttl: int = DEFAULT_CACHE_TTL) -> None:
        """Extend the item's time to live"""
        self.expire_at = time.time() + ttl


# HistoryQueue stores per-user history
class HistoryQueue:
    def __init__(self, max_size: int = DEFAULT_HISTORY_QUEUE_SIZE, ttl: int = DEFAULT_HISTORY_CACHE_TTL):
        self.items: List[Dict[str, Any]] = []
        self.max_size = max_size
        self.ttl = ttl
        self.expire_at = time.time() + ttl

    def add(self, item: Dict[str, Any]) -> None:
        """Add an item to the queue, dropping the oldest item when full"""
        if len(self.items) >= self.max_size:
            self.items.pop(0)
        self.items.append(item)
        # Refresh the expiry whenever a new item is added
        self.refresh_expiry()

    def get_all(self) -> List[Dict[str, Any]]:
        """Return all items in the queue"""
        return self.items

    def is_expired(self) -> bool:
        """Check whether the queue has expired"""
        return time.time() > self.expire_at

    def refresh_expiry(self) -> None:
        """Reset the expiry time"""
        self.expire_at = time.time() + self.ttl


# Main cache class
class InMemoryCache:
    def __init__(
        self,
        ttl: int = DEFAULT_CACHE_TTL,
        cleanup_interval: int = DEFAULT_CACHE_CLEANUP_INTERVAL,
        max_size: int = DEFAULT_CACHE_MAX_SIZE
    ):
        self.cache: Dict[str, CacheItem] = {}
        self.ttl = ttl
        self.cleanup_interval = cleanup_interval
        self.max_size = max_size
        self.user_history_queues: Dict[str, HistoryQueue] = {}
        self.lock = threading.RLock()  # RLock to avoid deadlocks

        # Start the periodic cleanup thread (active expiration)
        self.cleanup_thread = threading.Thread(target=self._cleanup_task, daemon=True)
        self.cleanup_thread.start()

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Store a value in the cache"""
        with self.lock:
            ttl_value = ttl if ttl is not None else self.ttl

            # If the cache is full, evict the least recently used items
            if len(self.cache) >= self.max_size and key not in self.cache:
                self._evict_lru_items()

            self.cache[key] = CacheItem(value, ttl_value)
            logger.debug(f"Cache set: {key} (expires in {ttl_value}s)")

    def get(self, key: str, default: Any = None) -> Any:
        """
        Get a value from the cache. If the key does not exist or has expired, return the default.
        Uses lazy expiration: expired items are removed when they are accessed.
        """
        with self.lock:
            item = self.cache.get(key)

            # Key not found, or the item has expired
            if item is None or item.is_expired():
                # If the item exists but has expired, delete it (lazy expiration)
                if item is not None:
                    logger.debug(f"Cache miss (expired): {key}")
                    del self.cache[key]
                else:
                    logger.debug(f"Cache miss (not found): {key}")
                return default

            # Update the access time
            item.touch()
            logger.debug(f"Cache hit: {key}")
            return item.value

    def delete(self, key: str) -> bool:
        """Delete a key from the cache"""
        with self.lock:
            if key in self.cache:
                del self.cache[key]
                logger.debug(f"Cache delete: {key}")
                return True
            return False

    def clear(self) -> None:
        """Remove all data from the cache"""
        with self.lock:
            self.cache.clear()
            logger.debug("Cache cleared")

    def get_or_set(self, key: str, callback: Callable[[], T], ttl: Optional[int] = None) -> T:
        """
        Return the cached value if present; otherwise call the callback,
        store its result in the cache, and return it.
        """
        with self.lock:
            value = self.get(key)
            if value is None:
                value = callback()
                self.set(key, value, ttl)
            return value

    def _cleanup_task(self) -> None:
        """Background thread that removes expired items (active expiration)"""
        while True:
            time.sleep(self.cleanup_interval)
            try:
                self._remove_expired_items()
            except Exception as e:
                logger.error(f"Error in cache cleanup task: {e}")

    def _remove_expired_items(self) -> None:
        """Remove all expired items from the cache"""
        with self.lock:
            now = time.time()
            expired_keys = [k for k, v in self.cache.items() if v.is_expired()]
            for key in expired_keys:
                del self.cache[key]

            # Remove expired user history queues
            expired_user_ids = [uid for uid, queue in self.user_history_queues.items() if queue.is_expired()]
            for user_id in expired_user_ids:
                del self.user_history_queues[user_id]

            if expired_keys or expired_user_ids:
                logger.debug(f"Cleaned up {len(expired_keys)} expired cache items and {len(expired_user_ids)} expired history queues")

    def _evict_lru_items(self, count: int = 1) -> None:
        """Evict the least recently used items when the cache is full"""
        items = sorted(self.cache.items(), key=lambda x: x[1].last_accessed)
        for i in range(min(count, len(items))):
            del self.cache[items[i][0]]
        logger.debug(f"Evicted {min(count, len(items))} least recently used items from cache")

    def stats(self) -> Dict[str, Any]:
        """Return cache statistics"""
        with self.lock:
            now = time.time()
            total_items = len(self.cache)
            expired_items = sum(1 for item in self.cache.values() if item.is_expired())
            memory_usage = self._estimate_memory_usage()
            return {
                "total_items": total_items,
                "expired_items": expired_items,
                "active_items": total_items - expired_items,
                "memory_usage_bytes": memory_usage,
                "memory_usage_mb": memory_usage / (1024 * 1024),
                "max_size": self.max_size,
                "history_queues": len(self.user_history_queues)
            }

    def _estimate_memory_usage(self) -> int:
        """Estimate the cache's memory footprint (approximate)"""
        # Estimate based on the size of keys and values
        cache_size = sum(len(k) for k in self.cache.keys())
        for item in self.cache.values():
            try:
                # Approximate size of the value
                if isinstance(item.value, (str, bytes)):
                    cache_size += len(item.value)
                elif isinstance(item.value, (dict, list)):
                    cache_size += len(json.dumps(item.value))
                else:
                    # Default estimate for other data types
                    cache_size += 100
            except:
                cache_size += 100

        # Estimate the size of the user history queues
        for queue in self.user_history_queues.values():
            try:
                cache_size += len(json.dumps(queue.items)) + 100  # 100 bytes for metadata
            except:
                cache_size += 100

        return cache_size

    # Helper methods for managing user history
    def add_user_history(self, user_id: str, item: Dict[str, Any], queue_size: Optional[int] = None, ttl: Optional[int] = None) -> None:
        """Add an item to a user's history queue"""
        with self.lock:
            # Create the queue if it does not exist yet
            if user_id not in self.user_history_queues:
                queue_size_value = queue_size if queue_size is not None else DEFAULT_HISTORY_QUEUE_SIZE
                ttl_value = ttl if ttl is not None else DEFAULT_HISTORY_CACHE_TTL
                self.user_history_queues[user_id] = HistoryQueue(max_size=queue_size_value, ttl=ttl_value)

            # Add the item to the queue
            self.user_history_queues[user_id].add(item)
            logger.debug(f"Added history item for user {user_id}")

    def get_user_history(self, user_id: str, default: Any = None) -> List[Dict[str, Any]]:
        """Get a user's history from the cache"""
        with self.lock:
            queue = self.user_history_queues.get(user_id)

            # Queue not found, or it has expired
            if queue is None or queue.is_expired():
                if queue is not None and queue.is_expired():
                    del self.user_history_queues[user_id]
                    logger.debug(f"User history queue expired: {user_id}")
                return default if default is not None else []

            # Refresh the expiry time
            queue.refresh_expiry()
            logger.debug(f"Retrieved history for user {user_id}: {len(queue.items)} items")
            return queue.get_all()


# Singleton instance
_cache_instance = None

def get_cache() -> InMemoryCache:
    """Return the singleton InMemoryCache instance"""
    global _cache_instance
    if _cache_instance is None:
        _cache_instance = InMemoryCache()
    return _cache_instance
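A short usage sketch of the module above; the cache key and the loader function are made up for illustration, while `get_cache()`, `get_or_set()` and the history helpers are the ones defined in this file.

```python
from app.utils.cache import get_cache

cache = get_cache()  # singleton InMemoryCache

def load_faq_from_db():
    # Stand-in for a real database query (hypothetical data).
    return [{"question": "How do I book a hotel?", "answer": "Via the app."}]

# Read-through caching: the loader only runs on a cache miss,
# and the result is stored for the given TTL.
faqs = cache.get_or_set("faq:all", load_faq_from_db, ttl=300)

# Per-user history queue: bounded in size, TTL refreshed on every write.
cache.add_user_history("user123", {"query": "Hello", "response": "Hi!"})
recent = cache.get_user_history("user123")
```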
app/utils/pdf_processor.py
ADDED
|
@@ -0,0 +1,211 @@
|
import os
import time
import uuid
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import PyPDFLoader
from langchain_google_genai import GoogleGenerativeAIEmbeddings
import logging

from app.database.pinecone import get_pinecone_index, init_pinecone

# Configure logging
logger = logging.getLogger(__name__)

# Initialize the embeddings model
embeddings_model = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

class PDFProcessor:
    """Processes PDF files and creates embeddings"""

    def __init__(self, index_name="testbot768", namespace="Default"):
        """Initialize with the default Pinecone index name and namespace"""
        self.index_name = index_name
        self.namespace = namespace
        self.pinecone_index = None

    def _init_pinecone_connection(self):
        """Initialize the connection to Pinecone"""
        try:
            # Use the singleton pattern from the database.pinecone module
            self.pinecone_index = get_pinecone_index()
            if not self.pinecone_index:
                logger.error("Unable to connect to Pinecone")
                return False
            return True
        except Exception as e:
            logger.error(f"Error connecting to Pinecone: {str(e)}")
            return False

    async def process_pdf(self, file_path, document_id=None, metadata=None, progress_callback=None):
        """
        Process a PDF file, split it into chunks, and create embeddings

        Args:
            file_path (str): Path to the PDF file
            document_id (str, optional): Document ID; a new one is generated if not provided
            metadata (dict, optional): Extra metadata for the document
            progress_callback (callable, optional): Callback used to report progress

        Returns:
            dict: Processing result with the document_id and number of chunks processed
        """
        try:
            # Initialize the Pinecone connection if it does not exist yet
            if not self.pinecone_index:
                if not self._init_pinecone_connection():
                    return {"success": False, "error": "Unable to connect to Pinecone"}

            # Generate a document_id if none was provided
            if not document_id:
                document_id = str(uuid.uuid4())

            # Read the PDF file with PyPDFLoader
            logger.info(f"Reading PDF file: {file_path}")
            if progress_callback:
                await progress_callback("pdf_loading", 0.5, "Loading PDF file")

            loader = PyPDFLoader(file_path)
            pages = loader.load()

            # Extract and concatenate the text of all pages
            all_text = ""
            for page in pages:
                all_text += page.page_content + "\n"

            if progress_callback:
                await progress_callback("text_extraction", 0.6, "Extracted text from PDF")

            # Split the text into chunks
            text_splitter = RecursiveCharacterTextSplitter(chunk_size=800, chunk_overlap=300)
            chunks = text_splitter.split_text(all_text)

            logger.info(f"Split the PDF into {len(chunks)} chunks")
            if progress_callback:
                await progress_callback("chunking", 0.7, f"Split document into {len(chunks)} chunks")

            # Embed each chunk and upsert the vectors into Pinecone
            vectors = []
            for i, chunk in enumerate(chunks):
                # Report embedding progress
                if progress_callback and i % 5 == 0:  # Update every 5 chunks to avoid flooding notifications
                    embedding_progress = 0.7 + (0.3 * (i / len(chunks)))
                    await progress_callback("embedding", embedding_progress, f"Processing chunk {i+1}/{len(chunks)}")

                # Create the embedding vector for this chunk
                vector = embeddings_model.embed_query(chunk)

                # Prepare the vector metadata
                vector_metadata = {
                    "document_id": document_id,
                    "chunk_index": i,
                    "text": chunk
                }

                # Merge in any extra metadata
                if metadata:
                    for key, value in metadata.items():
                        if key not in vector_metadata:
                            vector_metadata[key] = value

                # Add the vector to the upsert batch
                vectors.append({
                    "id": f"{document_id}_{i}",
                    "values": vector,
                    "metadata": vector_metadata
                })

                # Upsert every 100 vectors to keep batches from growing too large
                if len(vectors) >= 100:
                    await self._upsert_vectors(vectors)
                    vectors = []

            # Upsert any remaining vectors
            if vectors:
                await self._upsert_vectors(vectors)

            logger.info(f"Embedded and stored {len(chunks)} chunks from the PDF with document_id: {document_id}")

            # Final progress update
            if progress_callback:
                await progress_callback("completed", 1.0, "PDF processing complete")

            return {
                "success": True,
                "document_id": document_id,
                "chunks_processed": len(chunks),
                "total_text_length": len(all_text)
            }

        except Exception as e:
            logger.error(f"Error processing PDF: {str(e)}")
            if progress_callback:
                await progress_callback("error", 0, f"Error processing PDF: {str(e)}")
            return {
                "success": False,
                "error": str(e)
            }

    async def _upsert_vectors(self, vectors):
        """Upsert vectors into Pinecone"""
        try:
            if not vectors:
                return

            result = self.pinecone_index.upsert(
                vectors=vectors,
                namespace=self.namespace
            )

            logger.info(f"Upserted {len(vectors)} vectors into Pinecone")
            return result
        except Exception as e:
            logger.error(f"Error upserting vectors: {str(e)}")
            raise

    async def delete_namespace(self):
        """
        Delete every vector in the current namespace (equivalent to deleting the namespace).
        """
        # Initialize the connection if needed
        if not self.pinecone_index and not self._init_pinecone_connection():
            return {"success": False, "error": "Unable to connect to Pinecone"}

        try:
            # delete_all=True removes every vector in the namespace
            result = self.pinecone_index.delete(
                delete_all=True,
                namespace=self.namespace
            )
            logger.info(f"Deleted namespace '{self.namespace}' (all vectors).")
            return {"success": True, "detail": result}
        except Exception as e:
            logger.error(f"Error deleting namespace '{self.namespace}': {e}")
            return {"success": False, "error": str(e)}

    async def list_documents(self):
        """Get the list of document_ids from Pinecone"""
        try:
            # Initialize the Pinecone connection if it does not exist yet
            if not self.pinecone_index:
                if not self._init_pinecone_connection():
                    return {"success": False, "error": "Unable to connect to Pinecone"}

            # Get the index stats
            stats = self.pinecone_index.describe_index_stats()

            # Querying for every unique document_id this way can be inefficient for large datasets,
            # but it is the simplest approach; in practice the document_id list should be stored
            # in a separate database.

            return {
                "success": True,
                "total_vectors": stats.get('total_vector_count', 0),
                "namespace": self.namespace,
                "index_name": self.index_name
            }
        except Exception as e:
            logger.error(f"Error listing documents: {str(e)}")
            return {
                "success": False,
                "error": str(e)
            }
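A minimal sketch of driving `PDFProcessor` from a script. The file path and the print-based callback are illustrative; the upload route presumably supplies a WebSocket-backed callback instead, and running this requires Pinecone and Google API credentials to be configured in the environment.

```python
import asyncio
from app.utils.pdf_processor import PDFProcessor

async def log_progress(stage: str, progress: float, message: str):
    # Stand-in progress callback; the real one would push updates to clients.
    print(f"[{stage}] {progress:.0%} - {message}")

async def main():
    processor = PDFProcessor(index_name="testbot768", namespace="Default")
    result = await processor.process_pdf(
        "sample.pdf",                          # hypothetical local file
        metadata={"source": "manual-upload"},
        progress_callback=log_progress,
    )
    # On success: {"success": True, "document_id": "...", "chunks_processed": N, ...}
    print(result)

asyncio.run(main())
```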
app/utils/utils.py
CHANGED
|
@@ -2,10 +2,13 @@ import logging
 import time
 import uuid
 import threading
 from functools import wraps
 from datetime import datetime, timedelta
 import pytz
-from typing import Callable, Any, Dict, Optional

 # Configure logging
 logging.basicConfig(
@@ -70,46 +73,395 @@ def truncate_text(text, max_length=100):
         return text
     return text[:max_length] + "..."

-    … (old lines 73-77 removed; content not recoverable from this view)

-    def …
         """Get value from cache if it exists and hasn't expired"""
         if key in self._cache:
-            # … (old lines 82-84 removed)
         else:
-            # … (old lines 86-91 removed)

     def set(self, key: str, value: Any, ttl: int = 300) -> None:
         """Set a value in the cache with TTL in seconds"""
-        # Set expiry time
-        self._expiry[key] = datetime.now() + timedelta(seconds=ttl)

     def delete(self, key: str) -> None:
         """Delete a key from the cache"""
-        del self._cache[key]
-        if key in self._expiry:
-            del self._expiry[key]

     def clear(self) -> None:
         """Clear the entire cache"""
-        self._expiry.clear()
-
-# Initialize cache
-cache = SimpleCache()

 def get_host_url(request) -> str:
     """
| 2 |
import time
|
| 3 |
import uuid
|
| 4 |
import threading
|
| 5 |
+
import os
|
| 6 |
from functools import wraps
|
| 7 |
from datetime import datetime, timedelta
|
| 8 |
import pytz
|
| 9 |
+
from typing import Callable, Any, Dict, Optional, List, Tuple, Set
|
| 10 |
+
import gc
|
| 11 |
+
import heapq
|
| 12 |
|
| 13 |
# Configure logging
|
| 14 |
logging.basicConfig(
|
|
|
|
| 73 |
return text
|
| 74 |
return text[:max_length] + "..."
|
| 75 |
|
| 76 |
+
class CacheStrategy:
|
| 77 |
+
"""Cache loading strategy enumeration"""
|
| 78 |
+
LAZY = "lazy" # Only load items into cache when requested
|
| 79 |
+
EAGER = "eager" # Preload items into cache at initialization
|
| 80 |
+
MIXED = "mixed" # Preload high-priority items, lazy load others
|
| 81 |
+
|
| 82 |
+
class CacheItem:
|
| 83 |
+
"""Represents an item in the cache with metadata"""
|
| 84 |
+
def __init__(self, key: str, value: Any, ttl: int = 300, priority: int = 1):
|
| 85 |
+
self.key = key
|
| 86 |
+
self.value = value
|
| 87 |
+
self.expiry = datetime.now() + timedelta(seconds=ttl)
|
| 88 |
+
self.priority = priority # Higher number = higher priority
|
| 89 |
+
self.access_count = 0 # Track number of accesses
|
| 90 |
+
self.last_accessed = datetime.now()
|
| 91 |
+
|
| 92 |
+
def is_expired(self) -> bool:
|
| 93 |
+
"""Check if the item is expired"""
|
| 94 |
+
return datetime.now() > self.expiry
|
| 95 |
|
| 96 |
+
def touch(self):
|
| 97 |
+
"""Update last accessed time and access count"""
|
| 98 |
+
self.last_accessed = datetime.now()
|
| 99 |
+
self.access_count += 1
|
| 100 |
+
|
| 101 |
+
def __lt__(self, other):
|
| 102 |
+
"""For heap comparisons - lower priority items are evicted first"""
|
| 103 |
+
# First compare priority
|
| 104 |
+
if self.priority != other.priority:
|
| 105 |
+
return self.priority < other.priority
|
| 106 |
+
# Then compare access frequency (less frequently accessed items are evicted first)
|
| 107 |
+
if self.access_count != other.access_count:
|
| 108 |
+
return self.access_count < other.access_count
|
| 109 |
+
# Finally compare last access time (oldest accessed first)
|
| 110 |
+
return self.last_accessed < other.last_accessed
|
| 111 |
+
|
| 112 |
+
def get_size(self) -> int:
|
| 113 |
+
"""Approximate memory size of the cache item in bytes"""
|
| 114 |
+
try:
|
| 115 |
+
import sys
|
| 116 |
+
return sys.getsizeof(self.value) + sys.getsizeof(self.key) + 64 # Additional overhead
|
| 117 |
+
except:
|
| 118 |
+
# Default estimate if we can't get the size
|
| 119 |
+
return 1024
|
| 120 |
+
|
| 121 |
+
# Enhanced in-memory cache implementation
|
| 122 |
+
class EnhancedCache:
|
| 123 |
+
def __init__(self,
|
| 124 |
+
strategy: str = "lazy",
|
| 125 |
+
max_items: int = 10000,
|
| 126 |
+
max_size_mb: int = 100,
|
| 127 |
+
cleanup_interval: int = 60,
|
| 128 |
+
stats_enabled: bool = True):
|
| 129 |
+
"""
|
| 130 |
+
Initialize enhanced cache with configurable strategy.
|
| 131 |
+
|
| 132 |
+
Args:
|
| 133 |
+
strategy: Cache loading strategy (lazy, eager, mixed)
|
| 134 |
+
max_items: Maximum number of items to store in cache
|
| 135 |
+
max_size_mb: Maximum size of cache in MB
|
| 136 |
+
cleanup_interval: Interval in seconds to run cleanup
|
| 137 |
+
stats_enabled: Whether to collect cache statistics
|
| 138 |
+
"""
|
| 139 |
+
self._cache: Dict[str, CacheItem] = {}
|
| 140 |
+
self._namespace_cache: Dict[str, Set[str]] = {} # Tracking keys by namespace
|
| 141 |
+
self._strategy = strategy
|
| 142 |
+
self._max_items = max_items
|
| 143 |
+
self._max_size_bytes = max_size_mb * 1024 * 1024
|
| 144 |
+
self._current_size_bytes = 0
|
| 145 |
+
self._stats_enabled = stats_enabled
|
| 146 |
+
|
| 147 |
+
# Statistics
|
| 148 |
+
self._hits = 0
|
| 149 |
+
self._misses = 0
|
| 150 |
+
self._evictions = 0
|
| 151 |
+
self._total_get_time = 0
|
| 152 |
+
self._total_set_time = 0
|
| 153 |
+
|
| 154 |
+
# Setup cleanup thread
|
| 155 |
+
self._last_cleanup = datetime.now()
|
| 156 |
+
self._cleanup_interval = cleanup_interval
|
| 157 |
+
self._lock = threading.RLock()
|
| 158 |
+
|
| 159 |
+
if cleanup_interval > 0:
|
| 160 |
+
self._start_cleanup_thread(cleanup_interval)
|
| 161 |
+
|
| 162 |
+
logger.info(f"Enhanced cache initialized with strategy={strategy}, max_items={max_items}, max_size={max_size_mb}MB")
|
| 163 |
+
|
| 164 |
+
def _start_cleanup_thread(self, interval: int):
|
| 165 |
+
"""Start background thread for periodic cleanup"""
|
| 166 |
+
def cleanup_worker():
|
| 167 |
+
while True:
|
| 168 |
+
time.sleep(interval)
|
| 169 |
+
try:
|
| 170 |
+
self.cleanup()
|
| 171 |
+
except Exception as e:
|
| 172 |
+
logger.error(f"Error in cache cleanup: {e}")
|
| 173 |
+
|
| 174 |
+
thread = threading.Thread(target=cleanup_worker, daemon=True)
|
| 175 |
+
thread.start()
|
| 176 |
+
logger.info(f"Cache cleanup thread started with interval {interval}s")
|
| 177 |
+
|
| 178 |
+
def get(self, key: str, namespace: str = None) -> Optional[Any]:
|
| 179 |
"""Get value from cache if it exists and hasn't expired"""
|
| 180 |
+
if self._stats_enabled:
|
| 181 |
+
start_time = time.time()
|
| 182 |
+
|
| 183 |
+
# Use namespaced key if namespace is provided
|
| 184 |
+
cache_key = f"{namespace}:{key}" if namespace else key
|
| 185 |
+
|
| 186 |
+
with self._lock:
|
| 187 |
+
cache_item = self._cache.get(cache_key)
|
| 188 |
+
|
| 189 |
+
if cache_item:
|
| 190 |
+
if cache_item.is_expired():
|
| 191 |
+
# Clean up expired key
|
| 192 |
+
self._remove_item(cache_key, namespace)
|
| 193 |
+
if self._stats_enabled:
|
| 194 |
+
self._misses += 1
|
| 195 |
+
value = None
|
| 196 |
+
else:
|
| 197 |
+
# Update access metadata
|
| 198 |
+
cache_item.touch()
|
| 199 |
+
if self._stats_enabled:
|
| 200 |
+
self._hits += 1
|
| 201 |
+
value = cache_item.value
|
| 202 |
+
else:
|
| 203 |
+
if self._stats_enabled:
|
| 204 |
+
self._misses += 1
|
| 205 |
+
value = None
|
| 206 |
+
|
| 207 |
+
if self._stats_enabled:
|
| 208 |
+
self._total_get_time += time.time() - start_time
|
| 209 |
+
|
| 210 |
+
return value
|
| 211 |
+
|
| 212 |
+
def set(self, key: str, value: Any, ttl: int = 300, priority: int = 1, namespace: str = None) -> None:
|
| 213 |
+
"""Set a value in the cache with TTL in seconds"""
|
| 214 |
+
if self._stats_enabled:
|
| 215 |
+
start_time = time.time()
|
| 216 |
+
|
| 217 |
+
# Use namespaced key if namespace is provided
|
| 218 |
+
cache_key = f"{namespace}:{key}" if namespace else key
|
| 219 |
+
|
| 220 |
+
with self._lock:
|
| 221 |
+
# Create cache item
|
| 222 |
+
cache_item = CacheItem(cache_key, value, ttl, priority)
|
| 223 |
+
item_size = cache_item.get_size()
|
| 224 |
+
|
| 225 |
+
# Check if we need to make room
|
| 226 |
+
if (len(self._cache) >= self._max_items or
|
| 227 |
+
self._current_size_bytes + item_size > self._max_size_bytes):
|
| 228 |
+
self._evict_items(item_size)
|
| 229 |
+
|
| 230 |
+
# Update size tracking
|
| 231 |
+
if cache_key in self._cache:
|
| 232 |
+
# If replacing, subtract old size first
|
| 233 |
+
self._current_size_bytes -= self._cache[cache_key].get_size()
|
| 234 |
+
self._current_size_bytes += item_size
|
| 235 |
+
|
| 236 |
+
# Store the item
|
| 237 |
+
self._cache[cache_key] = cache_item
|
| 238 |
+
|
| 239 |
+
# Update namespace tracking
|
| 240 |
+
if namespace:
|
| 241 |
+
if namespace not in self._namespace_cache:
|
| 242 |
+
self._namespace_cache[namespace] = set()
|
| 243 |
+
self._namespace_cache[namespace].add(cache_key)
|
| 244 |
+
|
| 245 |
+
if self._stats_enabled:
|
| 246 |
+
self._total_set_time += time.time() - start_time
|
| 247 |
+
|
| 248 |
+
def delete(self, key: str, namespace: str = None) -> None:
|
| 249 |
+
"""Delete a key from the cache"""
|
| 250 |
+
# Use namespaced key if namespace is provided
|
| 251 |
+
cache_key = f"{namespace}:{key}" if namespace else key
|
| 252 |
+
|
| 253 |
+
with self._lock:
|
| 254 |
+
self._remove_item(cache_key, namespace)
|
| 255 |
+
|
| 256 |
+
def _remove_item(self, key: str, namespace: str = None):
|
| 257 |
+
"""Internal method to remove an item and update tracking"""
|
| 258 |
if key in self._cache:
|
| 259 |
+
# Update size tracking
|
| 260 |
+
self._current_size_bytes -= self._cache[key].get_size()
|
| 261 |
+
# Remove from cache
|
| 262 |
+
del self._cache[key]
|
| 263 |
+
|
| 264 |
+
# Update namespace tracking
|
| 265 |
+
if namespace and namespace in self._namespace_cache:
|
| 266 |
+
if key in self._namespace_cache[namespace]:
|
| 267 |
+
self._namespace_cache[namespace].remove(key)
|
| 268 |
+
# Cleanup empty sets
|
| 269 |
+
if not self._namespace_cache[namespace]:
|
| 270 |
+
del self._namespace_cache[namespace]
|
| 271 |
+
|
| 272 |
+
def _evict_items(self, needed_space: int = 0) -> None:
|
| 273 |
+
"""Evict items to make room in the cache"""
|
| 274 |
+
if not self._cache:
|
| 275 |
+
return
|
| 276 |
+
|
| 277 |
+
with self._lock:
|
| 278 |
+
# Convert cache items to a list for sorting
|
| 279 |
+
items = list(self._cache.values())
|
| 280 |
+
|
| 281 |
+
# Sort by priority, access count, and last accessed time
|
| 282 |
+
items.sort() # Uses the __lt__ method of CacheItem
|
| 283 |
+
|
| 284 |
+
# Evict items until we have enough space
|
| 285 |
+
space_freed = 0
|
| 286 |
+
evicted_count = 0
|
| 287 |
+
|
| 288 |
+
for item in items:
|
| 289 |
+
# Stop if we've made enough room
|
| 290 |
+
if (len(self._cache) - evicted_count <= self._max_items * 0.9 and
|
| 291 |
+
(space_freed >= needed_space or
|
| 292 |
+
self._current_size_bytes - space_freed <= self._max_size_bytes * 0.9)):
|
| 293 |
+
break
|
| 294 |
+
|
| 295 |
+
# Skip high priority items unless absolutely necessary
|
| 296 |
+
if item.priority > 9 and evicted_count < len(items) // 2:
|
| 297 |
+
continue
|
| 298 |
+
|
| 299 |
+
# Evict this item
|
| 300 |
+
item_size = item.get_size()
|
| 301 |
+
namespace = item.key.split(':', 1)[0] if ':' in item.key else None
|
| 302 |
+
self._remove_item(item.key, namespace)
|
| 303 |
+
|
| 304 |
+
space_freed += item_size
|
| 305 |
+
evicted_count += 1
|
| 306 |
+
if self._stats_enabled:
|
| 307 |
+
self._evictions += 1
|
| 308 |
+
|
| 309 |
+
logger.info(f"Cache eviction: removed {evicted_count} items, freed {space_freed / 1024:.2f}KB")
|
| 310 |
+
|
| 311 |
+
def clear(self, namespace: str = None) -> None:
|
| 312 |
+
"""
|
| 313 |
+
Clear the cache or a specific namespace
|
| 314 |
+
"""
|
| 315 |
+
with self._lock:
|
| 316 |
+
if namespace:
|
| 317 |
+
# Clear only keys in the specified namespace
|
| 318 |
+
if namespace in self._namespace_cache:
|
| 319 |
+
keys_to_remove = list(self._namespace_cache[namespace])
|
| 320 |
+
for key in keys_to_remove:
|
| 321 |
+
self._remove_item(key, namespace)
|
| 322 |
+
# The namespace should be auto-cleaned in _remove_item
|
| 323 |
else:
|
| 324 |
+
# Clear the entire cache
|
| 325 |
+
self._cache.clear()
|
| 326 |
+
self._namespace_cache.clear()
|
| 327 |
+
self._current_size_bytes = 0
|
| 328 |
+
|
| 329 |
+
logger.info(f"Cache cleared{' for namespace ' + namespace if namespace else ''}")
|
| 330 |
+
|
| 331 |
+
def cleanup(self) -> None:
|
| 332 |
+
"""Remove expired items and run garbage collection if needed"""
|
| 333 |
+
with self._lock:
|
| 334 |
+
now = datetime.now()
|
| 335 |
+
# Only run if it's been at least cleanup_interval since last cleanup
|
| 336 |
+
if (now - self._last_cleanup).total_seconds() < self._cleanup_interval:
|
| 337 |
+
return
|
| 338 |
+
|
| 339 |
+
# Find expired items
|
| 340 |
+
expired_keys = []
|
| 341 |
+
for key, item in self._cache.items():
|
| 342 |
+
if item.is_expired():
|
| 343 |
+
expired_keys.append((key, key.split(':', 1)[0] if ':' in key else None))
|
| 344 |
+
|
| 345 |
+
# Remove expired items
|
| 346 |
+
for key, namespace in expired_keys:
|
| 347 |
+
self._remove_item(key, namespace)
|
| 348 |
+
|
| 349 |
+
# Update last cleanup time
|
| 350 |
+
self._last_cleanup = now
|
| 351 |
+
|
| 352 |
+
# Run garbage collection if we removed several items
|
| 353 |
+
if len(expired_keys) > 100:
|
| 354 |
+
gc.collect()
|
| 355 |
+
|
| 356 |
+
logger.info(f"Cache cleanup: removed {len(expired_keys)} expired items")
|
| 357 |
+
|
| 358 |
+
def get_stats(self) -> Dict:
|
| 359 |
+
"""Get cache statistics"""
|
| 360 |
+
with self._lock:
|
| 361 |
+
if not self._stats_enabled:
|
| 362 |
+
return {"stats_enabled": False}
|
| 363 |
+
|
| 364 |
+
# Calculate hit rate
|
| 365 |
+
total_requests = self._hits + self._misses
|
| 366 |
+
hit_rate = (self._hits / total_requests) * 100 if total_requests > 0 else 0
|
| 367 |
+
|
| 368 |
+
# Calculate average times
|
| 369 |
+
avg_get_time = (self._total_get_time / total_requests) * 1000 if total_requests > 0 else 0
|
| 370 |
+
avg_set_time = (self._total_set_time / self._evictions) * 1000 if self._evictions > 0 else 0
|
| 371 |
+
|
| 372 |
+
return {
|
| 373 |
+
"stats_enabled": True,
|
| 374 |
+
"item_count": len(self._cache),
|
| 375 |
+
"max_items": self._max_items,
|
| 376 |
+
"size_bytes": self._current_size_bytes,
|
| 377 |
+
"max_size_bytes": self._max_size_bytes,
|
| 378 |
+
"hits": self._hits,
|
| 379 |
+
"misses": self._misses,
|
| 380 |
+
"hit_rate_percent": round(hit_rate, 2),
|
| 381 |
+
"evictions": self._evictions,
|
| 382 |
+
"avg_get_time_ms": round(avg_get_time, 3),
|
| 383 |
+
"avg_set_time_ms": round(avg_set_time, 3),
|
| 384 |
+
"namespace_count": len(self._namespace_cache),
|
| 385 |
+
"namespaces": list(self._namespace_cache.keys())
|
| 386 |
+
}
|
| 387 |
+
|
| 388 |
+
def preload(self, items: List[Tuple[str, Any, int, int]], namespace: str = None) -> None:
|
| 389 |
+
"""
|
| 390 |
+
Preload a list of items into the cache
|
| 391 |
+
|
| 392 |
+
Args:
|
| 393 |
+
items: List of (key, value, ttl, priority) tuples
|
| 394 |
+
namespace: Optional namespace for all items
|
| 395 |
+
"""
|
| 396 |
+
for key, value, ttl, priority in items:
|
| 397 |
+
self.set(key, value, ttl, priority, namespace)
|
| 398 |
+
|
| 399 |
+
logger.info(f"Preloaded {len(items)} items into cache{' namespace ' + namespace if namespace else ''}")
|
| 400 |
+
|
| 401 |
+
def get_or_load(self, key: str, loader_func: Callable[[], Any],
|
| 402 |
+
ttl: int = 300, priority: int = 1, namespace: str = None) -> Any:
|
| 403 |
+
"""
|
| 404 |
+
Get from cache or load using the provided function
|
| 405 |
+
|
| 406 |
+
Args:
|
| 407 |
+
key: Cache key
|
| 408 |
+
loader_func: Function to call if cache miss occurs
|
| 409 |
+
ttl: TTL in seconds
|
| 410 |
+
priority: Item priority
|
| 411 |
+
namespace: Optional namespace
|
| 412 |
+
|
| 413 |
+
Returns:
|
| 414 |
+
Cached or freshly loaded value
|
| 415 |
+
"""
|
| 416 |
+
# Try to get from cache first
|
| 417 |
+
value = self.get(key, namespace)
|
| 418 |
+
|
| 419 |
+
# If not in cache, load it
|
| 420 |
+
if value is None:
|
| 421 |
+
value = loader_func()
|
| 422 |
+
# Only cache if we got a valid value
|
| 423 |
+
if value is not None:
|
| 424 |
+
self.set(key, value, ttl, priority, namespace)
|
| 425 |
+
|
| 426 |
+
return value
|
| 427 |
+
|
| 428 |
+
# Load cache configuration from environment variables
|
| 429 |
+
CACHE_STRATEGY = os.getenv("CACHE_STRATEGY", "mixed")
|
| 430 |
+
CACHE_MAX_ITEMS = int(os.getenv("CACHE_MAX_ITEMS", "10000"))
|
| 431 |
+
CACHE_MAX_SIZE_MB = int(os.getenv("CACHE_MAX_SIZE_MB", "100"))
|
| 432 |
+
CACHE_CLEANUP_INTERVAL = int(os.getenv("CACHE_CLEANUP_INTERVAL", "60"))
|
| 433 |
+
CACHE_STATS_ENABLED = os.getenv("CACHE_STATS_ENABLED", "true").lower() in ("true", "1", "yes")
|
| 434 |
+
|
| 435 |
+
# Initialize the enhanced cache
|
| 436 |
+
cache = EnhancedCache(
|
| 437 |
+
strategy=CACHE_STRATEGY,
|
| 438 |
+
max_items=CACHE_MAX_ITEMS,
|
| 439 |
+
max_size_mb=CACHE_MAX_SIZE_MB,
|
| 440 |
+
cleanup_interval=CACHE_CLEANUP_INTERVAL,
|
| 441 |
+
stats_enabled=CACHE_STATS_ENABLED
|
| 442 |
+
)
|
| 443 |
+
|
| 444 |
+
# Backward compatibility for SimpleCache - for a transition period
|
| 445 |
+
class SimpleCache:
|
| 446 |
+
def __init__(self):
|
| 447 |
+
"""Legacy SimpleCache implementation that uses EnhancedCache underneath"""
|
| 448 |
+
logger.warning("SimpleCache is deprecated, please use EnhancedCache directly")
|
| 449 |
+
|
| 450 |
+
def get(self, key: str) -> Optional[Any]:
|
| 451 |
+
"""Get value from cache if it exists and hasn't expired"""
|
| 452 |
+
return cache.get(key)
|
| 453 |
|
| 454 |
def set(self, key: str, value: Any, ttl: int = 300) -> None:
|
| 455 |
"""Set a value in the cache with TTL in seconds"""
|
| 456 |
+
cache.set(key, value, ttl)
|
|
|
|
|
|
|
| 457 |
|
| 458 |
def delete(self, key: str) -> None:
|
| 459 |
"""Delete a key from the cache"""
|
| 460 |
+
cache.delete(key)
|
|
|
|
|
|
|
|
|
|
| 461 |
|
| 462 |
def clear(self) -> None:
|
| 463 |
"""Clear the entire cache"""
|
| 464 |
+
cache.clear()
|
|
|
|
|
|
|
|
|
|
|
|
|
| 465 |
|
| 466 |
def get_host_url(request) -> str:
|
| 467 |
"""
|
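The `EnhancedCache` added to app/utils/utils.py above exposes namespaced, priority-aware read-through caching via `get_or_load`. A short sketch of how a route might use the module-level `cache` instance; the key, loader, and namespace below are made-up examples, not the repository's actual call sites.

```python
from app.utils.utils import cache  # module-level EnhancedCache instance

def fetch_featured_events():
    # Stand-in for the real PostgreSQL query.
    return [{"name": "Da Nang Fireworks Festival", "featured": True}]

events = cache.get_or_load(
    "events:featured",        # cache key
    fetch_featured_events,    # called only on a cache miss
    ttl=300,                  # seconds
    priority=5,               # higher-priority items are evicted later
    namespace="postgres",     # cache.clear("postgres") invalidates this group
)

# Statistics are collected when CACHE_STATS_ENABLED is true.
print(cache.get_stats().get("hit_rate_percent"))
```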
docs/api_documentation.md
ADDED
|
@@ -0,0 +1,581 @@
|
| 1 |
+
# API Documentation
|
| 2 |
+
|
| 3 |
+
## Frontend Setup
|
| 4 |
+
|
| 5 |
+
```javascript
|
| 6 |
+
// Basic Axios setup
|
| 7 |
+
import axios from 'axios';
|
| 8 |
+
|
| 9 |
+
const api = axios.create({
|
| 10 |
+
baseURL: 'https://api.your-domain.com',
|
| 11 |
+
timeout: 10000,
|
| 12 |
+
headers: {
|
| 13 |
+
'Content-Type': 'application/json',
|
| 14 |
+
'Accept': 'application/json'
|
| 15 |
+
}
|
| 16 |
+
});
|
| 17 |
+
|
| 18 |
+
// Error handling
|
| 19 |
+
api.interceptors.response.use(
|
| 20 |
+
response => response.data,
|
| 21 |
+
error => {
|
| 22 |
+
const errorMessage = error.response?.data?.detail || 'An error occurred';
|
| 23 |
+
console.error('API Error:', errorMessage);
|
| 24 |
+
return Promise.reject(errorMessage);
|
| 25 |
+
}
|
| 26 |
+
);
|
| 27 |
+
```
|
| 28 |
+
|
| 29 |
+
## Caching System
|
| 30 |
+
|
| 31 |
+
- All GET endpoints support `use_cache=true` parameter (default)
|
| 32 |
+
- Cache TTL: 300 seconds (5 minutes)
|
| 33 |
+
- Cache is automatically invalidated on data changes
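
The cache behaviour is driven entirely by the `use_cache` query parameter. A minimal client-side sketch, using Python's `requests` against the `/postgres/faq` endpoint documented below (the base URL is an assumption, matching the Axios setup above):

```python
import requests

BASE_URL = "https://api.your-domain.com"  # assumed base URL

# First call may hit PostgreSQL; repeats within the 300s TTL are served from cache.
faqs = requests.get(
    f"{BASE_URL}/postgres/faq",
    params={"active_only": True, "use_cache": True},
).json()

# Force a fresh read, bypassing the cache.
fresh = requests.get(
    f"{BASE_URL}/postgres/faq",
    params={"use_cache": False},
).json()
```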
|
| 34 |
+
|
| 35 |
+
## Authentication
|
| 36 |
+
|
| 37 |
+
Currently no authentication is required. If implemented in the future, use JWT Bearer tokens:
|
| 38 |
+
|
| 39 |
+
```javascript
|
| 40 |
+
const api = axios.create({
|
| 41 |
+
// ...other config
|
| 42 |
+
headers: {
|
| 43 |
+
// ...other headers
|
| 44 |
+
'Authorization': `Bearer ${token}`
|
| 45 |
+
}
|
| 46 |
+
});
|
| 47 |
+
```
|
| 48 |
+
|
| 49 |
+
## Error Codes
|
| 50 |
+
|
| 51 |
+
| Code | Description |
|
| 52 |
+
|------|-------------|
|
| 53 |
+
| 400 | Bad Request |
|
| 54 |
+
| 404 | Not Found |
|
| 55 |
+
| 500 | Internal Server Error |
|
| 56 |
+
| 503 | Service Unavailable |
|
| 57 |
+
|
| 58 |
+
## PostgreSQL Endpoints
|
| 59 |
+
|
| 60 |
+
### FAQ Endpoints
|
| 61 |
+
|
| 62 |
+
#### Get FAQs List
|
| 63 |
+
```
|
| 64 |
+
GET /postgres/faq
|
| 65 |
+
```
|
| 66 |
+
|
| 67 |
+
Parameters:
|
| 68 |
+
- `skip`: Number of items to skip (default: 0)
|
| 69 |
+
- `limit`: Maximum items to return (default: 100)
|
| 70 |
+
- `active_only`: Return only active items (default: false)
|
| 71 |
+
- `use_cache`: Use cached data if available (default: true)
|
| 72 |
+
|
| 73 |
+
Response:
|
| 74 |
+
```json
|
| 75 |
+
[
|
| 76 |
+
{
|
| 77 |
+
"question": "How do I book a hotel?",
|
| 78 |
+
"answer": "You can book a hotel through our app or website.",
|
| 79 |
+
"is_active": true,
|
| 80 |
+
"id": 1,
|
| 81 |
+
"created_at": "2023-01-01T00:00:00",
|
| 82 |
+
"updated_at": "2023-01-01T00:00:00"
|
| 83 |
+
}
|
| 84 |
+
]
|
| 85 |
+
```
|
| 86 |
+
|
| 87 |
+
Example:
|
| 88 |
+
```javascript
|
| 89 |
+
async function getFAQs() {
|
| 90 |
+
try {
|
| 91 |
+
const data = await api.get('/postgres/faq', {
|
| 92 |
+
params: { active_only: true, limit: 20 }
|
| 93 |
+
});
|
| 94 |
+
return data;
|
| 95 |
+
} catch (error) {
|
| 96 |
+
console.error('Error fetching FAQs:', error);
|
| 97 |
+
throw error;
|
| 98 |
+
}
|
| 99 |
+
}
|
| 100 |
+
```
|
| 101 |
+
|
| 102 |
+
#### Create FAQ
|
| 103 |
+
```
|
| 104 |
+
POST /postgres/faq
|
| 105 |
+
```
|
| 106 |
+
|
| 107 |
+
Request Body:
|
| 108 |
+
```json
|
| 109 |
+
{
|
| 110 |
+
"question": "How do I book a hotel?",
|
| 111 |
+
"answer": "You can book a hotel through our app or website.",
|
| 112 |
+
"is_active": true
|
| 113 |
+
}
|
| 114 |
+
```
|
| 115 |
+
|
| 116 |
+
Response: Created FAQ object
|
| 117 |
+
|
| 118 |
+
#### Get FAQ Detail
|
| 119 |
+
```
|
| 120 |
+
GET /postgres/faq/{faq_id}
|
| 121 |
+
```
|
| 122 |
+
|
| 123 |
+
Parameters:
|
| 124 |
+
- `faq_id`: ID of FAQ (required)
|
| 125 |
+
- `use_cache`: Use cached data if available (default: true)
|
| 126 |
+
|
| 127 |
+
Response: FAQ object
|
| 128 |
+
|
| 129 |
+
#### Update FAQ
|
| 130 |
+
```
|
| 131 |
+
PUT /postgres/faq/{faq_id}
|
| 132 |
+
```
|
| 133 |
+
|
| 134 |
+
Parameters:
|
| 135 |
+
- `faq_id`: ID of FAQ to update (required)
|
| 136 |
+
|
| 137 |
+
Request Body: Partial or complete FAQ object
|
| 138 |
+
Response: Updated FAQ object
|
| 139 |
+
|
| 140 |
+
#### Delete FAQ
|
| 141 |
+
```
|
| 142 |
+
DELETE /postgres/faq/{faq_id}
|
| 143 |
+
```
|
| 144 |
+
|
| 145 |
+
Parameters:
|
| 146 |
+
- `faq_id`: ID of FAQ to delete (required)
|
| 147 |
+
|
| 148 |
+
Response:
|
| 149 |
+
```json
|
| 150 |
+
{
|
| 151 |
+
"status": "success",
|
| 152 |
+
"message": "FAQ item 1 deleted"
|
| 153 |
+
}
|
| 154 |
+
```
|
| 155 |
+
|
| 156 |
+
#### Batch Operations
|
| 157 |
+
|
| 158 |
+
Create multiple FAQs:
|
| 159 |
+
```
|
| 160 |
+
POST /postgres/faqs/batch
|
| 161 |
+
```
|
| 162 |
+
|
| 163 |
+
Update status of multiple FAQs:
|
| 164 |
+
```
|
| 165 |
+
PUT /postgres/faqs/batch-update-status
|
| 166 |
+
```
|
| 167 |
+
|
| 168 |
+
Delete multiple FAQs:
|
| 169 |
+
```
|
| 170 |
+
DELETE /postgres/faqs/batch
|
| 171 |
+
```
|
| 172 |
+
|
| 173 |
+
### Emergency Contact Endpoints
|
| 174 |
+
|
| 175 |
+
#### Get Emergency Contacts
|
| 176 |
+
```
|
| 177 |
+
GET /postgres/emergency
|
| 178 |
+
```
|
| 179 |
+
|
| 180 |
+
Parameters:
|
| 181 |
+
- `skip`: Number of items to skip (default: 0)
|
| 182 |
+
- `limit`: Maximum items to return (default: 100)
|
| 183 |
+
- `active_only`: Return only active items (default: false)
|
| 184 |
+
- `use_cache`: Use cached data if available (default: true)
|
| 185 |
+
|
| 186 |
+
Response: Array of Emergency Contact objects
|
| 187 |
+
|
| 188 |
+
#### Create Emergency Contact
|
| 189 |
+
```
|
| 190 |
+
POST /postgres/emergency
|
| 191 |
+
```
|
| 192 |
+
|
| 193 |
+
Request Body:
|
| 194 |
+
```json
|
| 195 |
+
{
|
| 196 |
+
"name": "Fire Department",
|
| 197 |
+
"phone_number": "114",
|
| 198 |
+
"description": "Fire rescue services",
|
| 199 |
+
"address": "Da Nang",
|
| 200 |
+
"location": "16.0544, 108.2022",
|
| 201 |
+
"priority": 1,
|
| 202 |
+
"is_active": true
|
| 203 |
+
}
|
| 204 |
+
```
|
| 205 |
+
|
| 206 |
+
Response: Created Emergency Contact object
|
| 207 |
+
|
| 208 |
+
#### Get Emergency Contact
|
| 209 |
+
```
|
| 210 |
+
GET /postgres/emergency/{emergency_id}
|
| 211 |
+
```
|
| 212 |
+
|
| 213 |
+
#### Update Emergency Contact
|
| 214 |
+
```
|
| 215 |
+
PUT /postgres/emergency/{emergency_id}
|
| 216 |
+
```
|
| 217 |
+
|
| 218 |
+
#### Delete Emergency Contact
|
| 219 |
+
```
|
| 220 |
+
DELETE /postgres/emergency/{emergency_id}
|
| 221 |
+
```
|
| 222 |
+
|
| 223 |
+
#### Batch Operations
|
| 224 |
+
|
| 225 |
+
Create multiple Emergency Contacts:
|
| 226 |
+
```
|
| 227 |
+
POST /postgres/emergency/batch
|
| 228 |
+
```
|
| 229 |
+
|
| 230 |
+
Update status of multiple Emergency Contacts:
|
| 231 |
+
```
|
| 232 |
+
PUT /postgres/emergency/batch-update-status
|
| 233 |
+
```
|
| 234 |
+
|
| 235 |
+
Delete multiple Emergency Contacts:
|
| 236 |
+
```
|
| 237 |
+
DELETE /postgres/emergency/batch
|
| 238 |
+
```
|
| 239 |
+
|
| 240 |
+
### Event Endpoints
|
| 241 |
+
|
| 242 |
+
#### Get Events
|
| 243 |
+
```
|
| 244 |
+
GET /postgres/events
|
| 245 |
+
```
|
| 246 |
+
|
| 247 |
+
Parameters:
|
| 248 |
+
- `skip`: Number of items to skip (default: 0)
|
| 249 |
+
- `limit`: Maximum items to return (default: 100)
|
| 250 |
+
- `active_only`: Return only active items (default: false)
|
| 251 |
+
- `featured_only`: Return only featured items (default: false)
|
| 252 |
+
- `use_cache`: Use cached data if available (default: true)
|
| 253 |
+
|
| 254 |
+
Response: Array of Event objects
|
| 255 |
+
|
| 256 |
+
#### Create Event
|
| 257 |
+
```
|
| 258 |
+
POST /postgres/events
|
| 259 |
+
```
|
| 260 |
+
|
| 261 |
+
Request Body:
|
| 262 |
+
```json
|
| 263 |
+
{
|
| 264 |
+
"name": "Da Nang Fireworks Festival",
|
| 265 |
+
"description": "International Fireworks Festival Da Nang 2023",
|
| 266 |
+
"address": "Dragon Bridge, Da Nang",
|
| 267 |
+
"location": "16.0610, 108.2277",
|
| 268 |
+
"date_start": "2023-06-01T19:00:00",
|
| 269 |
+
"date_end": "2023-06-01T22:00:00",
|
| 270 |
+
"price": [
|
| 271 |
+
{"type": "VIP", "amount": 500000},
|
| 272 |
+
{"type": "Standard", "amount": 300000}
|
| 273 |
+
],
|
| 274 |
+
"url": "https://danangfireworks.com",
|
| 275 |
+
"is_active": true,
|
| 276 |
+
"featured": true
|
| 277 |
+
}
|
| 278 |
+
```
|
| 279 |
+
|
| 280 |
+
Response: Created Event object
|
| 281 |
+
|
| 282 |
+
#### Get Event
|
| 283 |
+
```
|
| 284 |
+
GET /postgres/events/{event_id}
|
| 285 |
+
```
|
| 286 |
+
|
| 287 |
+
#### Update Event
|
| 288 |
+
```
|
| 289 |
+
PUT /postgres/events/{event_id}
|
| 290 |
+
```
|
| 291 |
+
|
| 292 |
+
#### Delete Event
|
| 293 |
+
```
|
| 294 |
+
DELETE /postgres/events/{event_id}
|
| 295 |
+
```
|
| 296 |
+
|
| 297 |
+
#### Batch Operations
|
| 298 |
+
|
| 299 |
+
Create multiple Events:
|
| 300 |
+
```
|
| 301 |
+
POST /postgres/events/batch
|
| 302 |
+
```
|
| 303 |
+
|
| 304 |
+
Update status of multiple Events:
|
| 305 |
+
```
|
| 306 |
+
PUT /postgres/events/batch-update-status
|
| 307 |
+
```
|
| 308 |
+
|
| 309 |
+
Delete multiple Events:
|
| 310 |
+
```
|
| 311 |
+
DELETE /postgres/events/batch
|
| 312 |
+
```
|
| 313 |
+
|
| 314 |
+
### About Pixity Endpoints
|
| 315 |
+
|
| 316 |
+
#### Get About Pixity
|
| 317 |
+
```
|
| 318 |
+
GET /postgres/about-pixity
|
| 319 |
+
```
|
| 320 |
+
|
| 321 |
+
Response:
|
| 322 |
+
```json
|
| 323 |
+
{
|
| 324 |
+
"content": "PiXity is your smart, AI-powered local companion...",
|
| 325 |
+
"id": 1,
|
| 326 |
+
"created_at": "2023-01-01T00:00:00",
|
| 327 |
+
"updated_at": "2023-01-01T00:00:00"
|
| 328 |
+
}
|
| 329 |
+
```
|
| 330 |
+
|
| 331 |
+
#### Update About Pixity
|
| 332 |
+
```
|
| 333 |
+
PUT /postgres/about-pixity
|
| 334 |
+
```
|
| 335 |
+
|
| 336 |
+
Request Body:
|
| 337 |
+
```json
|
| 338 |
+
{
|
| 339 |
+
"content": "PiXity is your smart, AI-powered local companion..."
|
| 340 |
+
}
|
| 341 |
+
```
|
| 342 |
+
|
| 343 |
+
Response: Updated About Pixity object
|
| 344 |
+
|
| 345 |
+
### Da Nang Bucket List Endpoints
|
| 346 |
+
|
| 347 |
+
#### Get Da Nang Bucket List
|
| 348 |
+
```
|
| 349 |
+
GET /postgres/danang-bucket-list
|
| 350 |
+
```
|
| 351 |
+
|
| 352 |
+
Response: Bucket List object with JSON content string
|
| 353 |
+
|
| 354 |
+
#### Update Da Nang Bucket List
|
| 355 |
+
```
|
| 356 |
+
PUT /postgres/danang-bucket-list
|
| 357 |
+
```
|
| 358 |
+
|
| 359 |
+
### Solana Summit Endpoints
|
| 360 |
+
|
| 361 |
+
#### Get Solana Summit
|
| 362 |
+
```
|
| 363 |
+
GET /postgres/solana-summit
|
| 364 |
+
```
|
| 365 |
+
|
| 366 |
+
Response: Solana Summit object with JSON content string
|
| 367 |
+
|
| 368 |
+
#### Update Solana Summit
|
| 369 |
+
```
|
| 370 |
+
PUT /postgres/solana-summit
|
| 371 |
+
```
|
| 372 |
+
|
| 373 |
+
### Health Check
|
| 374 |
+
```
|
| 375 |
+
GET /postgres/health
|
| 376 |
+
```
|
| 377 |
+
|
| 378 |
+
Response:
|
| 379 |
+
```json
|
| 380 |
+
{
|
| 381 |
+
"status": "healthy",
|
| 382 |
+
"message": "PostgreSQL connection is working",
|
| 383 |
+
"timestamp": "2023-01-01T00:00:00"
|
| 384 |
+
}
|
| 385 |
+
```
|
| 386 |
+
|
| 387 |
+
## MongoDB Endpoints
|
| 388 |
+
|
| 389 |
+
### Session Endpoints
|
| 390 |
+
|
| 391 |
+
#### Create Session
|
| 392 |
+
```
|
| 393 |
+
POST /session
|
| 394 |
+
```
|
| 395 |
+
|
| 396 |
+
Request Body:
|
| 397 |
+
```json
|
| 398 |
+
{
|
| 399 |
+
"user_id": "user123",
|
| 400 |
+
"query": "How do I book a room?",
|
| 401 |
+
"timestamp": "2023-01-01T00:00:00",
|
| 402 |
+
"metadata": {
|
| 403 |
+
"client_info": "web",
|
| 404 |
+
"location": "Da Nang"
|
| 405 |
+
}
|
| 406 |
+
}
|
| 407 |
+
```
|
| 408 |
+
|
| 409 |
+
Response: Created Session object with session_id
|
| 410 |
+

#### Update Session with Response
```
PUT /session/{session_id}/response
```

Request Body:
```json
{
  "response": "You can book a room through our app or website.",
  "response_timestamp": "2023-01-01T00:00:05",
  "metadata": {
    "response_time_ms": 234,
    "model_version": "gpt-4"
  }
}
```

Response: Updated Session object
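
To illustrate the intended flow, the sketch below records an incoming user query as a session and then attaches the assistant's answer to it. The base URL is an assumption; the `session_id` field name follows the Create Session response description above.

```python
from datetime import datetime, timezone

import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment

# 1) Record the incoming user query as a new session
created = requests.post(
    f"{BASE_URL}/session",
    json={
        "user_id": "user123",
        "query": "How do I book a room?",
        "timestamp": datetime.now(timezone.utc).isoformat(),
    },
).json()
session_id = created["session_id"]  # field name taken from the response description

# 2) Once the bot has answered, attach the response to the same session
requests.put(
    f"{BASE_URL}/session/{session_id}/response",
    json={
        "response": "You can book a room through our app or website.",
        "response_timestamp": datetime.now(timezone.utc).isoformat(),
    },
)
```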

#### Get Session
```
GET /session/{session_id}
```

Response: Session object

#### Get User History
```
GET /history
```

Parameters:
- `user_id`: User ID (required)
- `limit`: Maximum sessions to return (default: 10)
- `skip`: Number of sessions to skip (default: 0)

Response:
```json
{
  "user_id": "user123",
  "sessions": [
    {
      "session_id": "60f7a8b9c1d2e3f4a5b6c7d8",
      "query": "How do I book a room?",
      "timestamp": "2023-01-01T00:00:00",
      "response": "You can book a room through our app or website.",
      "response_timestamp": "2023-01-01T00:00:05"
    }
  ],
  "total_count": 1
}
```
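
A short sketch of paging through a user's history with the query parameters listed above; only the base URL is an assumption.

```python
import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment

# Fetch the 5 most recent sessions for a user
history = requests.get(
    f"{BASE_URL}/history",
    params={"user_id": "user123", "limit": 5, "skip": 0},
).json()

for session in history["sessions"]:
    print(session["query"], "->", session.get("response"))
print("total:", history["total_count"])
```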

#### Health Check
```
GET /health
```

## RAG Endpoints

### Create Embedding
```
POST /embedding
```

Request Body:
```json
{
  "text": "Text to embed"
}
```

Response:
```json
{
  "embedding": [0.1, 0.2, 0.3, ...],
  "dimensions": 1536
}
```
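
For example, the call below embeds a short string and checks the vector length against the reported dimensions; the base URL is an assumption.

```python
import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment

resp = requests.post(f"{BASE_URL}/embedding", json={"text": "Text to embed"}).json()
assert len(resp["embedding"]) == resp["dimensions"]
print(f"Got a {resp['dimensions']}-dimensional embedding")
```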

### Process Chat Request
```
POST /chat
```

Request Body:
```json
{
  "query": "Can you tell me about Pixity?",
  "chat_history": [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hello! How can I help you?"}
  ]
}
```

Response:
```json
{
  "answer": "Pixity is a platform...",
  "sources": [
    {
      "document_id": "doc123",
      "chunk_id": "chunk456",
      "chunk_text": "Pixity was founded in...",
      "relevance_score": 0.92
    }
  ]
}
```
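
A minimal multi-turn sketch: the client keeps the running `chat_history` itself and appends the returned answer before the next request. Field names follow the request and response bodies above; the base URL is an assumption.

```python
import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment

chat_history = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hello! How can I help you?"},
]

query = "Can you tell me about Pixity?"
resp = requests.post(
    f"{BASE_URL}/chat",
    json={"query": query, "chat_history": chat_history},
).json()

print(resp["answer"])
for source in resp["sources"]:
    print(f'- {source["document_id"]} (score {source["relevance_score"]})')

# Keep the history up to date for the next turn
chat_history += [
    {"role": "user", "content": query},
    {"role": "assistant", "content": resp["answer"]},
]
```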

### Direct RAG Query
```
POST /rag
```

Request Body:
```json
{
  "query": "Can you tell me about Pixity?",
  "namespace": "about_pixity",
  "top_k": 3
}
```

Response: Query results with relevance scores
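
The direct query is useful for inspecting retrieval without answer generation. The sketch below asks for the top 3 chunks from the `about_pixity` namespace; the base URL is an assumption, and the response field names are not specified in this document, so the result is simply printed.

```python
import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment

results = requests.post(
    f"{BASE_URL}/rag",
    json={
        "query": "Can you tell me about Pixity?",
        "namespace": "about_pixity",
        "top_k": 3,
    },
).json()

# Inspect the returned matches and their relevance scores
print(results)
```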

### Health Check
```
GET /health
```

## PDF Processing Endpoints

### Upload and Process PDF
```
POST /pdf/upload
```

Form Data:
- `file`: PDF file (required)
- `namespace`: Vector database namespace (default: "Default")
- `index_name`: Vector database index name (default: "testbot768")
- `title`: Document title (optional)
- `description`: Document description (optional)
- `user_id`: User ID for WebSocket updates (optional)

Response: Processing results with document_id
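
Uploading is a standard multipart form POST; the sketch below sends a local PDF into the default namespace with `requests`. The base URL and the file path are placeholders, and the form fields mirror the list above.

```python
import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment

with open("guide.pdf", "rb") as pdf:  # placeholder file path
    resp = requests.post(
        f"{BASE_URL}/pdf/upload",
        files={"file": ("guide.pdf", pdf, "application/pdf")},
        data={
            "namespace": "Default",
            "index_name": "testbot768",
            "title": "Da Nang Guide",
            "user_id": "user123",  # optional; enables WebSocket progress updates
        },
    )

print(resp.status_code, resp.json())  # processing results include document_id
```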

### Delete Documents in Namespace
```
DELETE /pdf/namespace
```

Parameters:
- `namespace`: Vector database namespace (default: "Default")
- `index_name`: Vector database index name (default: "testbot768")
- `user_id`: User ID for WebSocket updates (optional)

Response: Deletion results

### Get Documents List
```
GET /pdf/documents
```

Parameters:
- `namespace`: Vector database namespace (default: "Default")
- `index_name`: Vector database index name (default: "testbot768")

Response: List of documents in the namespace
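
As a closing sketch, the snippet below lists the documents in a namespace and then clears that namespace. The base URL is an assumption, and note that the delete call removes every document in the namespace, so treat it as irreversible.

```python
import requests

BASE_URL = "http://localhost:7860"  # assumed local deployment
params = {"namespace": "Default", "index_name": "testbot768"}

# List what is currently stored in the namespace
docs = requests.get(f"{BASE_URL}/pdf/documents", params=params).json()
print(docs)

# Remove every document in that namespace (irreversible)
deleted = requests.delete(f"{BASE_URL}/pdf/namespace", params=params)
print(deleted.json())
```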
requirements.txt
CHANGED
@@ -40,4 +40,8 @@ watchfiles==0.21.0
 
 # Core dependencies
 starlette==0.27.0
-psutil==5.9.6
+psutil==5.9.6
+
+# Upload PDF
+pypdf==3.17.4
+