working with google auth
- .gitignore +2 -1
- README.md +1 -1
- agentic_implementation/README_MCP.md +0 -115
- agentic_implementation/agent.py +0 -141
- agentic_implementation/email_db.json +0 -14
- agentic_implementation/email_mcp_server.py +0 -245
- agentic_implementation/email_mcp_server_oauth.py +61 -3
- agentic_implementation/email_scraper.py +0 -443
- agentic_implementation/gradio_ag.py +0 -102
- agentic_implementation/name_mapping.json +0 -4
- agentic_implementation/re_act.py +0 -229
- agentic_implementation/requirements_oauth.txt +1 -1
- agentic_implementation/schemas.py +0 -47
- agentic_implementation/tools.py +0 -244
- app.py +0 -64
- client/main.py +0 -183
- requirements.txt +0 -11
- server/email_db.json +0 -135
- server/email_scraper.py +0 -267
- server/main.py +0 -29
- server/name_mapping.json +0 -3
- server/query_parser.py +0 -189
- server/routes.py +0 -206
.gitignore
CHANGED
@@ -9,4 +9,5 @@ __pycache__/
 *.log

 agentic_implementation/*.json
-agentic_implementation/*.db
+agentic_implementation/*.db
+logs/
README.md
CHANGED
@@ -5,7 +5,7 @@ colorFrom: yellow
 colorTo: purple
 sdk: gradio
 sdk_version: 5.0.1
-app_file: agentic_implementation/
+app_file: agentic_implementation/email_mcp_server_oauth.py
 pinned: false
 short_description: Answer any questions you have about the content of your mail
 ---
agentic_implementation/README_MCP.md
DELETED
@@ -1,115 +0,0 @@
-# Email Assistant MCP Server
-
-This is a Gradio-based MCP (Model Context Protocol) server that allows Claude Desktop to interact with your Gmail emails.
-
-## Features
-
-- **Email Search**: Search your emails using natural language queries
-- **Email Details**: Get full details of specific emails by message ID
-- **Pattern Analysis**: Analyze email patterns from specific senders over time
-
-## Setup
-
-1. **Install Dependencies**:
-   ```bash
-   pip install -r requirements_mcp.txt
-   ```
-
-2. **Set up Gmail App Password**:
-   - Enable 2-Factor Authentication on your Gmail account
-   - Generate an App Password: https://support.google.com/accounts/answer/185833
-   - Keep your Gmail address and app password ready
-
-3. **Run the MCP Server**:
-   ```bash
-   python email_mcp_server.py
-   ```
-
-The server will start and show you the MCP endpoint URL, typically:
-```
-http://localhost:7860/gradio_api/mcp/sse
-```
-
-## Claude Desktop Configuration
-
-Add this configuration to your Claude Desktop MCP settings:
-
-**For SSE-supported clients:**
-```json
-{
-  "mcpServers": {
-    "email-assistant": {
-      "url": "http://localhost:7860/gradio_api/mcp/sse"
-    }
-  }
-}
-```
-
-**For Claude Desktop (requires mcp-remote):**
-```json
-{
-  "mcpServers": {
-    "email-assistant": {
-      "command": "npx",
-      "args": [
-        "mcp-remote",
-        "http://localhost:7860/gradio_api/mcp/sse"
-      ]
-    }
-  }
-}
-```
-
-## Available Tools
-
-### 1. search_emails
-Search your emails using natural language queries.
-
-**Parameters:**
-- `email_address`: Your Gmail address
-- `app_password`: Your Gmail app password
-- `query`: Natural language query (e.g., "show me emails from amazon last week")
-
-**Example Usage in Claude:**
-> "Can you search my emails for messages from Swiggy in the last week? My email is [email protected] and my app password is xxxx-xxxx-xxxx-xxxx"
-
-### 2. get_email_details
-Get full details of a specific email by message ID.
-
-**Parameters:**
-- `email_address`: Your Gmail address
-- `app_password`: Your Gmail app password
-- `message_id`: Message ID from search results
-
-### 3. analyze_email_patterns
-Analyze email patterns from a specific sender over time.
-
-**Parameters:**
-- `email_address`: Your Gmail address
-- `app_password`: Your Gmail app password
-- `sender_keyword`: Sender to analyze (e.g., "amazon", "google")
-- `days_back`: Number of days to analyze (default: "30")
-
-## Security Notes
-
-- Your email credentials are only used for the duration of each tool call
-- Credentials are not stored or logged by the server
-- All communication happens locally on your machine
-- The server only exposes the MCP interface, not a public web interface
-
-## Troubleshooting
-
-1. **Connection Issues**: Make sure your Gmail app password is correct and 2FA is enabled
-2. **MCP Client Issues**: Try restarting Claude Desktop after configuration changes
-3. **Search Issues**: The tool searches in FROM, SUBJECT, and BODY fields for keywords
-
-## Example Queries
-
-Once configured with Claude Desktop, you can ask:
-
-- "Search my emails for messages from Amazon in the last month"
-- "Show me emails from my bank from last week"
-- "Analyze my LinkedIn email patterns over the last 60 days"
-- "Find emails from Swiggy today"
-
-Claude will automatically call the appropriate tools with your provided credentials.
agentic_implementation/agent.py
DELETED
@@ -1,141 +0,0 @@
-# agent.py
-
-import json
-from typing import Dict, Any
-
-from re_act import (
-    get_plan_from_llm,
-    think,
-    act,
-    store_name_email_mapping,
-    extract_sender_info,
-    client
-)
-from schemas import PlanStep
-from logger import logger  # from logger.py
-
-
-def run_agent():
-    """
-    Main REPL loop for the email agent.
-    """
-    logger.info("Starting Email Agent REPL...")
-    print("🤖 Email Agent ready. Type 'exit' to quit.\n")
-
-    while True:
-        try:
-            user_query = input("🗨 You: ").strip()
-            logger.info("Received user input: %s", user_query)
-
-            if user_query.lower() in ("exit", "quit"):
-                logger.info("Exit command detected, shutting down agent.")
-                print("👋 Goodbye!")
-                break
-
-            # 1) Generate plan
-            try:
-                plan = get_plan_from_llm(user_query)
-                logger.debug("Generated plan: %s", plan)
-            except Exception as e:
-                logger.error("Failed to generate plan: %s", e)
-                print(f"❌ Could not generate a plan: {e}")
-                continue
-
-            # print plan for user transparency
-            print("\n\nplan:")
-            print(plan)
-            print("\n\n")
-
-            results: Dict[str, Any] = {}
-
-            # 2) Execute each plan step
-            for step in plan.plan:
-                logger.info("Processing step: %s", step.action)
-
-                if step.action == "done":
-                    logger.info("Encountered 'done' action. Plan complete.")
-                    print("✅ Plan complete.")
-                    break
-
-                try:
-                    should_run, updated_step, user_prompt = think(step, results, user_query)
-                    logger.debug(
-                        "Think outcome for '%s': should_run=%s, updated_step=%s, user_prompt=%s",
-                        step.action, should_run, updated_step, user_prompt
-                    )
-                except Exception as e:
-                    logger.error("Error in think() for step '%s': %s", step.action, e)
-                    print(f"❌ Error in planning step '{step.action}': {e}")
-                    break
-
-                # Handle user prompt (e.g., missing email mapping)
-                if user_prompt:
-                    logger.info("User prompt required: %s", user_prompt)
-                    print(f"❓ {user_prompt}")
-                    user_input = input("📧 Email: ").strip()
-
-                    try:
-                        sender_info = extract_sender_info(user_query)
-                        sender_intent = sender_info.get("sender_intent", "")
-                        store_name_email_mapping(sender_intent, user_input)
-                        logger.info("Stored mapping: %s -> %s", sender_intent, user_input)
-                        print(f"✅ Stored mapping: {sender_intent} → {user_input}")
-
-                        # Retry current step
-                        should_run, updated_step, _ = think(step, results, user_query)
-                        logger.debug(
-                            "Post-mapping think outcome: should_run=%s, updated_step=%s",
-                            should_run, updated_step
-                        )
-                    except Exception as e:
-                        logger.error("Error storing mapping or retrying step '%s': %s", step.action, e)
-                        print(f"❌ Error storing mapping or retrying step: {e}")
-                        break
-
-                if not should_run:
-                    logger.info("Skipping step: %s", step.action)
-                    print(f"⏭️ Skipping `{step.action}`")
-                    continue
-
-                # Execute the action
-                try:
-                    output = act(updated_step)
-                    results[updated_step.action] = output
-                    logger.info("Action '%s' executed successfully.", updated_step.action)
-                    print(f"🔧 Ran `{updated_step.action}`")
-                except Exception as e:
-                    logger.error("Error executing action '%s': %s", updated_step.action, e)
-                    print(f"❌ Error running `{updated_step.action}`: {e}")
-                    break
-
-            # 3) Summarize results via LLM
-            try:
-                summary_rsp = client.chat.completions.create(
-                    model="gpt-4o-mini",
-                    temperature=0.0,
-                    messages=[
-                        {"role": "system", "content": "Summarize these results for the user in a friendly way."},
-                        {"role": "assistant", "content": json.dumps(results)}
-                    ],
-                )
-                summary = summary_rsp.choices[0].message.content
-                logger.info("Summary generated successfully.")
-                print("\n📋 Summary:\n" + summary)
-            except Exception as e:
-                logger.error("Failed to generate summary: %s", e)
-                print("\n❌ Failed to generate summary.")
-
-            print("\nAnything else I can help you with?\n")
-
-        except KeyboardInterrupt:
-            logger.info("KeyboardInterrupt received, shutting down.")
-            print("\n👋 Goodbye!")
-            break
-        except Exception as e:
-            logger.exception("Unexpected error in REPL loop: %s", e)
-            print(f"❌ Unexpected error: {e}")
-            continue
-
-
-if __name__ == "__main__":
-    run_agent()
agentic_implementation/email_db.json
DELETED
@@ -1,14 +0,0 @@
-{
-  "[email protected]": {
-    "emails": [
-      {
-        "date": "07-Jun-2025",
-        "time": "16:42:51",
-        "subject": "testing",
-        "content": "hi bro",
-        "message_id": "<CAPziNCaSuVqpqNNfsRjhVbx22jN_vos3EGK_Odt-8WiD0HRKKQ@mail.gmail.com>"
-      }
-    ],
-    "last_scraped": "08-Jun-2025"
-  }
-}
agentic_implementation/email_mcp_server.py
DELETED
@@ -1,245 +0,0 @@
-import gradio as gr
-import json
-import os
-from typing import Dict, List
-from datetime import datetime, timedelta
-from dotenv import load_dotenv
-
-# Import your existing modules
-from tools import extract_query_info, analyze_emails
-from email_scraper import scrape_emails_by_text_search_with_credentials, _load_email_db
-from logger import logger
-
-load_dotenv()
-
-def search_emails(email_address: str, app_password: str, query: str) -> str:
-    """
-    Search for emails based on a natural language query and return a summary.
-
-    Args:
-        email_address (str): The Gmail address to connect to
-        app_password (str): The Gmail app password for authentication
-        query (str): Natural language query (e.g., "show me mails from swiggy last week")
-
-    Returns:
-        str: JSON string containing email search results and analysis
-    """
-    try:
-        logger.info("Email MCP tool called with query: %s", query)
-
-        # Extract sender keyword and date range from query
-        query_info = extract_query_info(query)
-        sender_keyword = query_info.get("sender_keyword", "")
-        start_date = query_info.get("start_date")
-        end_date = query_info.get("end_date")
-
-        print(f"Searching for emails with keyword '{sender_keyword}' between {start_date} and {end_date}")
-
-        # Use the modified scraper function that accepts credentials
-        full_emails = scrape_emails_by_text_search_with_credentials(
-            email_address, app_password, sender_keyword, start_date, end_date
-        )
-
-        if not full_emails:
-            result = {
-                "query_info": query_info,
-                "email_summary": [],
-                "analysis": {"summary": f"No emails found for '{sender_keyword}' in the specified date range.", "insights": []},
-                "email_count": 0
-            }
-            return json.dumps(result, indent=2)
-
-        # Create summary version without full content
-        email_summary = []
-        for email in full_emails:
-            summary_email = {
-                "date": email.get("date"),
-                "time": email.get("time"),
-                "subject": email.get("subject"),
-                "from": email.get("from", "Unknown Sender"),
-                "message_id": email.get("message_id")
-            }
-            email_summary.append(summary_email)
-
-        # Auto-analyze the emails for insights
-        analysis = analyze_emails(full_emails)
-
-        # Return summary info with analysis
-        result = {
-            "query_info": query_info,
-            "email_summary": email_summary,
-            "analysis": analysis,
-            "email_count": len(full_emails)
-        }
-
-        return json.dumps(result, indent=2)
-
-    except Exception as e:
-        logger.error("Error in search_emails: %s", e)
-        error_result = {
-            "error": str(e),
-            "query": query,
-            "message": "Failed to search emails. Please check your credentials and try again."
-        }
-        return json.dumps(error_result, indent=2)
-
-
-def get_email_details(email_address: str, app_password: str, message_id: str) -> str:
-    """
-    Get full details of a specific email by its message ID.
-
-    Args:
-        email_address (str): The Gmail address to connect to
-        app_password (str): The Gmail app password for authentication
-        message_id (str): The message ID of the email to retrieve
-
-    Returns:
-        str: JSON string containing the full email details
-    """
-    try:
-        logger.info("Getting email details for message_id: %s", message_id)
-
-        # Load from local cache first
-        db = _load_email_db()
-
-        # Search each sender's email list
-        for sender_data in db.values():
-            for email in sender_data.get("emails", []):
-                if email.get("message_id") == message_id:
-                    return json.dumps(email, indent=2)
-
-        # If not found in cache
-        error_result = {
-            "error": f"No email found with message_id '{message_id}'",
-            "message": "Email may not be in local cache. Try searching for emails first."
-        }
-        return json.dumps(error_result, indent=2)
-
-    except Exception as e:
-        logger.error("Error in get_email_details: %s", e)
-        error_result = {
-            "error": str(e),
-            "message_id": message_id,
-            "message": "Failed to retrieve email details."
-        }
-        return json.dumps(error_result, indent=2)
-
-
-def analyze_email_patterns(email_address: str, app_password: str, sender_keyword: str, days_back: str = "30") -> str:
-    """
-    Analyze email patterns from a specific sender over a given time period.
-
-    Args:
-        email_address (str): The Gmail address to connect to
-        app_password (str): The Gmail app password for authentication
-        sender_keyword (str): The sender/company keyword to analyze (e.g., "amazon", "google")
-        days_back (str): Number of days to look back (default: "30")
-
-    Returns:
-        str: JSON string containing email pattern analysis
-    """
-    try:
-        logger.info("Analyzing email patterns for sender: %s, days_back: %s", sender_keyword, days_back)
-
-        # Calculate date range
-        days_int = int(days_back)
-        end_date = datetime.today()
-        start_date = end_date - timedelta(days=days_int)
-
-        start_date_str = start_date.strftime("%d-%b-%Y")
-        end_date_str = end_date.strftime("%d-%b-%Y")
-
-        # Search for emails
-        full_emails = scrape_emails_by_text_search_with_credentials(
-            email_address, app_password, sender_keyword, start_date_str, end_date_str
-        )
-
-        if not full_emails:
-            result = {
-                "sender_keyword": sender_keyword,
-                "date_range": f"{start_date_str} to {end_date_str}",
-                "analysis": {"summary": f"No emails found from '{sender_keyword}' in the last {days_back} days.", "insights": []},
-                "email_count": 0
-            }
-            return json.dumps(result, indent=2)
-
-        # Analyze the emails
-        analysis = analyze_emails(full_emails)
-
-        result = {
-            "sender_keyword": sender_keyword,
-            "date_range": f"{start_date_str} to {end_date_str}",
-            "analysis": analysis,
-            "email_count": len(full_emails)
-        }
-
-        return json.dumps(result, indent=2)
-
-    except Exception as e:
-        logger.error("Error in analyze_email_patterns: %s", e)
-        error_result = {
-            "error": str(e),
-            "sender_keyword": sender_keyword,
-            "message": "Failed to analyze email patterns."
-        }
-        return json.dumps(error_result, indent=2)
-
-
-# Create the Gradio interface for email search
-search_interface = gr.Interface(
-    fn=search_emails,
-    inputs=[
-        gr.Textbox(label="Email Address", placeholder="[email protected]"),
-        gr.Textbox(label="App Password", type="password", placeholder="Your Gmail app password"),
-        gr.Textbox(label="Query", placeholder="Show me emails from amazon last week")
-    ],
-    outputs=gr.Textbox(label="Search Results", lines=20),
-    title="Email Search",
-    description="Search your emails using natural language queries"
-)
-
-# Create the Gradio interface for email details
-details_interface = gr.Interface(
-    fn=get_email_details,
-    inputs=[
-        gr.Textbox(label="Email Address", placeholder="[email protected]"),
-        gr.Textbox(label="App Password", type="password", placeholder="Your Gmail app password"),
-        gr.Textbox(label="Message ID", placeholder="Email message ID from search results")
-    ],
-    outputs=gr.Textbox(label="Email Details", lines=20),
-    title="Email Details",
-    description="Get full details of a specific email by message ID"
-)
-
-# Create the Gradio interface for email pattern analysis
-analysis_interface = gr.Interface(
-    fn=analyze_email_patterns,
-    inputs=[
-        gr.Textbox(label="Email Address", placeholder="[email protected]"),
-        gr.Textbox(label="App Password", type="password", placeholder="Your Gmail app password"),
-        gr.Textbox(label="Sender Keyword", placeholder="amazon, google, linkedin, etc."),
-        gr.Textbox(label="Days Back", value="30", placeholder="Number of days to analyze")
-    ],
-    outputs=gr.Textbox(label="Analysis Results", lines=20),
-    title="Email Pattern Analysis",
-    description="Analyze email patterns from a specific sender over time"
-)
-
-# Combine interfaces into a tabbed interface
-demo = gr.TabbedInterface(
-    [search_interface, details_interface, analysis_interface],
-    ["Email Search", "Email Details", "Pattern Analysis"],
-    title="📧 Email Assistant MCP Server"
-)
-
-if __name__ == "__main__":
-    # Set environment variable to enable MCP server
-    import os
-    os.environ["GRADIO_MCP_SERVER"] = "True"
-
-    # Launch the server
-    demo.launch(share=False)
-
-    print("\n🚀 MCP Server is running!")
-    print("📍 MCP Endpoint: http://localhost:7860/gradio_api/mcp/sse")
-    print("📖 Copy this URL to your Claude Desktop MCP configuration")
agentic_implementation/email_mcp_server_oauth.py
CHANGED
@@ -5,13 +5,16 @@ Gmail MCP Server with OAuth Authentication and Multi-Account Support

 import gradio as gr
 import json
+import base64
+from email.mime.text import MIMEText
+from googleapiclient.errors import HttpError
 import os
 from typing import Dict, List
 from datetime import datetime, timedelta
 from dotenv import load_dotenv

 # Import OAuth-enabled modules
-from tools import extract_query_info, analyze_emails
+# from tools import extract_query_info, analyze_emails
 from gmail_api_scraper import GmailAPIScraper
 from oauth_manager import oauth_manager
 from logger import logger
@@ -517,6 +520,49 @@ def get_authentication_status() -> str:
             "error": str(e),
             "message": "Failed to check authentication status"
         }, indent=2)
+
+def send_email(recipient: str, subject: str, body: str) -> str:
+    """
+    Send a plain-text email via the authenticated Gmail account.
+    Returns JSON with either:
+      {"success": true, "message_id": "..."}
+    or
+      {"success": false, "error": "..."}
+    """
+    # Use the correct method on your OAuth manager:
+    service = oauth_manager.get_gmail_service()
+    if service is None:
+        return json.dumps(
+            {"success": False, "error": "Not authenticated or failed to build service."},
+            indent=2,
+        )
+
+    # Build the MIME message
+    mime_msg = MIMEText(body, "plain", "utf-8")
+    mime_msg["to"] = recipient
+    mime_msg["subject"] = subject
+
+    # Base64-encode and send
+    raw_msg = base64.urlsafe_b64encode(mime_msg.as_bytes()).decode()
+    try:
+        sent = (
+            service.users()
+            .messages()
+            .send(userId="me", body={"raw": raw_msg})
+            .execute()
+        )
+        return json.dumps(
+            {"success": True, "message_id": sent.get("id")}, indent=2
+        )
+    except HttpError as err:
+        logger.error(f"Error sending email: {err}")
+        # err.error_details may be None; fallback to string
+        error_detail = getattr(err, "error_details", None) or str(err)
+        return json.dumps(
+            {"success": False, "error": error_detail},
+            indent=2,
+        )
+

 # Create Gradio interfaces
 search_interface = gr.Interface(
@@ -596,10 +642,22 @@ remove_interface = gr.Interface(
     description="Remove an authenticated Gmail account and its credentials"
 )

+send_interface = gr.Interface(
+    fn=send_email,
+    inputs=[
+        gr.Textbox(label="Recipient Email", placeholder="[email protected]"),
+        gr.Textbox(label="Subject", placeholder="Email subject"),
+        gr.Textbox(label="Body", placeholder="Email body text", lines=5)
+    ],
+    outputs=gr.Textbox(label="Send Result", lines=10),
+    title="✉️ Send Email",
+    description="Send an email via Gmail using OAuth authenticated account"
+)
+
 # Combine interfaces into a tabbed interface
 demo = gr.TabbedInterface(
-    [auth_interface, status_interface, accounts_interface, switch_interface, remove_interface, search_interface, details_interface, analysis_interface],
-    ["🔐 Authenticate", "📊 Status", "👥 All Accounts", "🔄 Switch Account", "🗑️ Remove Account", "📧 Email Search", "📄 Email Details", "📈 Pattern Analysis"],
+    [auth_interface, status_interface, accounts_interface, switch_interface, remove_interface, search_interface, details_interface, analysis_interface, send_interface],
+    ["🔐 Authenticate", "📊 Status", "👥 All Accounts", "🔄 Switch Account", "🗑️ Remove Account", "📧 Email Search", "📄 Email Details", "📈 Pattern Analysis", "✉️ Send Email"],
     title="📧 Gmail Assistant MCP Server (Multi-Account OAuth)"
 )
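For context on the new tool, here is a minimal usage sketch: it calls `send_email` directly and parses the JSON envelope it returns. It assumes an account has already been authenticated through the "🔐 Authenticate" tab (so `oauth_manager.get_gmail_service()` returns a live service), and the recipient address is a placeholder:

```python
import json

# Hypothetical usage sketch for the send_email tool added in this commit.
# Assumes the module is importable and a Gmail account is already authenticated.
from email_mcp_server_oauth import send_email

result = json.loads(send_email(
    recipient="[email protected]",  # placeholder recipient
    subject="Hello from the MCP server",
    body="Sent through the Gmail API via OAuth.",
))

if result.get("success"):
    print("Sent. Gmail message id:", result["message_id"])
else:
    print("Send failed:", result["error"])
```

The `MIMEText` plus `base64.urlsafe_b64encode` round-trip matches what the Gmail API expects: `users().messages().send` takes the full RFC 2822 message as a URL-safe base64 string in the `raw` field.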
agentic_implementation/email_scraper.py
DELETED
@@ -1,443 +0,0 @@
-#!/usr/bin/env python3
-"""
-Enhanced Email Scraper with Intelligent Caching
-"""
-
-import os
-import imaplib
-import json
-from email import message_from_bytes
-from bs4 import BeautifulSoup
-from datetime import datetime, timedelta
-from dotenv import load_dotenv
-from zoneinfo import ZoneInfo
-from email.utils import parsedate_to_datetime
-from typing import List, Dict
-from logger import logger
-load_dotenv()
-
-# Email credentials
-APP_PASSWORD = os.getenv("APP_PASSWORD")
-EMAIL_ID = os.getenv("EMAIL_ID")
-print("EMAIL_ID: ", EMAIL_ID)
-EMAIL_DB_FILE = "email_db.json"
-
-def validate_email_setup():
-    """Validate email setup and credentials"""
-    print("=== Email Setup Validation ===")
-
-    issues = []
-
-    if not os.getenv("OPENAI_API_KEY"):
-        issues.append("OPENAI_API_KEY not set (needed for query processing)")
-
-    if issues:
-        print("❌ Issues found:")
-        for issue in issues:
-            print(f"  - {issue}")
-        return False
-    else:
-        print("✅ All credentials look good!")
-        return True
-
-def _imap_connect():
-    """Connect to Gmail IMAP server"""
-    print("=== IMAP Connection Debug ===")
-
-    # Check if environment variables are loaded
-    print(f"EMAIL_ID loaded: {'✅ Yes' if EMAIL_ID else '❌ No (None/Empty)'}")
-    print(f"APP_PASSWORD loaded: {'✅ Yes' if APP_PASSWORD else '❌ No (None/Empty)'}")
-
-    if EMAIL_ID:
-        print(f"Email ID: {EMAIL_ID[:5]}...@{EMAIL_ID.split('@')[1] if '@' in EMAIL_ID else 'INVALID'}")
-
-    if not EMAIL_ID or not APP_PASSWORD:
-        error_msg = "Missing credentials in environment variables!"
-        print(f"❌ {error_msg}")
-        raise Exception(error_msg)
-
-    try:
-        print("🔄 Attempting IMAP SSL connection to imap.gmail.com:993...")
-        mail = imaplib.IMAP4_SSL("imap.gmail.com")
-        print("✅ SSL connection established")
-
-        print("🔄 Attempting login...")
-        result = mail.login(EMAIL_ID, APP_PASSWORD)
-        print(f"✅ Login successful: {result}")
-
-        print("🔄 Selecting mailbox: [Gmail]/All Mail...")
-        result = mail.select('"[Gmail]/All Mail"')
-        print(f"✅ Mailbox selected: {result}")
-
-        print("=== IMAP Connection Successful ===")
-        return mail
-
-    except imaplib.IMAP4.error as e:
-        print(f"❌ IMAP Error: {e}")
-        print("💡 Possible causes:")
-        print("  - App Password is incorrect or expired")
-        print("  - 2FA not enabled on Gmail account")
-        print("  - IMAP access not enabled in Gmail settings")
-        print("  - Gmail account locked or requires security verification")
-        raise
-    except Exception as e:
-        print(f"❌ Connection Error: {e}")
-        print("💡 Possible causes:")
-        print("  - Network connectivity issues")
-        print("  - Gmail IMAP server temporarily unavailable")
-        print("  - Firewall blocking IMAP port 993")
-        raise
-
-def _email_to_clean_text(msg):
-    """Extract clean text from email message"""
-    # Try HTML first
-    html_content = None
-    text_content = None
-
-    if msg.is_multipart():
-        for part in msg.walk():
-            content_type = part.get_content_type()
-            if content_type == "text/html":
-                try:
-                    html_content = part.get_payload(decode=True).decode(errors="ignore")
-                except:
-                    continue
-            elif content_type == "text/plain":
-                try:
-                    text_content = part.get_payload(decode=True).decode(errors="ignore")
-                except:
-                    continue
-    else:
-        # Non-multipart message
-        content_type = msg.get_content_type()
-        try:
-            content = msg.get_payload(decode=True).decode(errors="ignore")
-            if content_type == "text/html":
-                html_content = content
-            else:
-                text_content = content
-        except:
-            pass
-
-    # Clean HTML content
-    if html_content:
-        soup = BeautifulSoup(html_content, "html.parser")
-        # Remove script and style elements
-        for script in soup(["script", "style"]):
-            script.decompose()
-        return soup.get_text(separator=' ', strip=True)
-    elif text_content:
-        return text_content.strip()
-    else:
-        return ""
-
-def _load_email_db() -> Dict:
-    """Load email database from file"""
-    if not os.path.exists(EMAIL_DB_FILE):
-        return {}
-    try:
-        with open(EMAIL_DB_FILE, "r") as f:
-            return json.load(f)
-    except (json.JSONDecodeError, IOError):
-        print(f"Warning: Could not load {EMAIL_DB_FILE}, starting with empty database")
-        return {}
-
-def _save_email_db(db: Dict):
-    """Save email database to file"""
-    try:
-        with open(EMAIL_DB_FILE, "w") as f:
-            json.dump(db, f, indent=2)
-    except IOError as e:
-        print(f"Error saving database: {e}")
-        raise
-
-def _date_to_imap_format(date_str: str) -> str:
-    """Convert DD-MMM-YYYY to IMAP date format"""
-    try:
-        dt = datetime.strptime(date_str, "%d-%b-%Y")
-        return dt.strftime("%d-%b-%Y")
-    except ValueError:
-        raise ValueError(f"Invalid date format: {date_str}. Expected DD-MMM-YYYY")
-
-def _is_date_in_range(email_date: str, start_date: str, end_date: str) -> bool:
-    """Check if email date is within the specified range"""
-    try:
-        email_dt = datetime.strptime(email_date, "%d-%b-%Y")
-        start_dt = datetime.strptime(start_date, "%d-%b-%Y")
-        end_dt = datetime.strptime(end_date, "%d-%b-%Y")
-        return start_dt <= email_dt <= end_dt
-    except ValueError:
-        return False
-
-
-def scrape_emails_by_text_search(keyword: str, start_date: str, end_date: str) -> List[Dict]:
-    """
-    Scrape emails containing a specific keyword (like company name) within date range.
-    Uses IMAP text search to find emails from senders containing the keyword.
-    """
-    print(f"Searching emails containing '{keyword}' between {start_date} and {end_date}")
-
-    # Validate setup first
-    if not validate_email_setup():
-        raise Exception("Email setup validation failed. Please check your .env file and credentials.")
-
-    try:
-        mail = _imap_connect()
-
-        # Prepare IMAP search criteria with text search
-        start_imap = _date_to_imap_format(start_date)
-        # Add one day to end_date for BEFORE criteria (IMAP BEFORE is exclusive)
-        end_dt = datetime.strptime(end_date, "%d-%b-%Y") + timedelta(days=1)
-        end_imap = end_dt.strftime("%d-%b-%Y")
-
-        # Search for emails containing the keyword in FROM field or SUBJECT or BODY
-        # We'll search multiple criteria and combine results
-        search_criteria_list = [
-            f'FROM "{keyword}" SINCE "{start_imap}" BEFORE "{end_imap}"',
-            f'SUBJECT "{keyword}" SINCE "{start_imap}" BEFORE "{end_imap}"',
-            f'BODY "{keyword}" SINCE "{start_imap}" BEFORE "{end_imap}"'
-        ]
-
-        all_email_ids = set()
-
-        # Search with multiple criteria to catch emails containing the keyword
-        for search_criteria in search_criteria_list:
-            try:
-                print(f"IMAP search: {search_criteria}")
-                status, data = mail.search(None, search_criteria)
-                if status == 'OK' and data[0]:
-                    email_ids = data[0].split()
-                    all_email_ids.update(email_ids)
-                    print(f"Found {len(email_ids)} emails with this criteria")
-            except Exception as e:
-                print(f"Search criteria failed: {search_criteria}, error: {e}")
-                continue
-
-        print(f"Total unique emails found: {len(all_email_ids)}")
-        scraped_emails = []
-
-        # Process each email
-        for i, email_id in enumerate(all_email_ids):
-            try:
-                print(f"Processing email {i+1}/{len(all_email_ids)}")
-
-                # Fetch email
-                status, msg_data = mail.fetch(email_id, "(RFC822)")
-                if status != 'OK':
-                    continue
-
-                # Parse email
-                msg = message_from_bytes(msg_data[0][1])
-
-                # Extract information
-                subject = msg.get("Subject", "No Subject")
-                from_header = msg.get("From", "Unknown Sender")
-                content = _email_to_clean_text(msg)
-
-                # Check if the keyword is actually present (case-insensitive)
-                keyword_lower = keyword.lower()
-                if not any(keyword_lower in text.lower() for text in [subject, from_header, content]):
-                    continue
-
-                # Parse date
-                date_header = msg.get("Date", "")
-                if date_header:
-                    try:
-                        dt_obj = parsedate_to_datetime(date_header)
-                        # Convert to IST
-                        ist_dt = dt_obj.astimezone(ZoneInfo("Asia/Kolkata"))
-                        email_date = ist_dt.strftime("%d-%b-%Y")
-                        email_time = ist_dt.strftime("%H:%M:%S")
-                    except:
-                        email_date = datetime.today().strftime("%d-%b-%Y")
-                        email_time = "00:00:00"
-                else:
-                    email_date = datetime.today().strftime("%d-%b-%Y")
-                    email_time = "00:00:00"
-
-                # Double-check date range
-                if not _is_date_in_range(email_date, start_date, end_date):
-                    continue
-
-                # Get message ID for deduplication
-                message_id = msg.get("Message-ID", f"missing-{email_id.decode()}")
-
-                scraped_emails.append({
-                    "date": email_date,
-                    "time": email_time,
-                    "subject": subject,
-                    "from": from_header,
-                    "content": content[:2000],  # Limit content length
-                    "message_id": message_id
-                })
-
-            except Exception as e:
-                print(f"Error processing email {email_id}: {e}")
-                continue
-
-        mail.logout()
-
-        # Sort by date (newest first)
-        scraped_emails.sort(key=lambda x: datetime.strptime(f"{x['date']} {x['time']}", "%d-%b-%Y %H:%M:%S"), reverse=True)
-
-        print(f"Successfully processed {len(scraped_emails)} emails containing '{keyword}'")
-        return scraped_emails
-
-    except Exception as e:
-        print(f"Email text search failed: {e}")
-        raise
-
-def scrape_emails_by_text_search_with_credentials(email_id: str, app_password: str, keyword: str, start_date: str, end_date: str) -> List[Dict]:
-    """
-    Scrape emails containing a specific keyword (like company name) within date range.
-    Uses provided credentials instead of environment variables.
-
-    Args:
-        email_id: Gmail address
-        app_password: Gmail app password
-        keyword: Keyword to search for
-        start_date: Start date in DD-MMM-YYYY format
-        end_date: End date in DD-MMM-YYYY format
-    """
-    print(f"Searching emails containing '{keyword}' between {start_date} and {end_date}")
-
-    if not email_id or not app_password:
-        raise Exception("Email ID and App Password are required")
-
-    try:
-        # Connect using provided credentials
-        print("=== IMAP Connection Debug ===")
-        print(f"Email ID: {email_id[:5]}...@{email_id.split('@')[1] if '@' in email_id else 'INVALID'}")
-        print("App password: [PROVIDED]")
-
-        print("🔄 Attempting IMAP SSL connection to imap.gmail.com:993...")
-        mail = imaplib.IMAP4_SSL("imap.gmail.com")
-        print("✅ SSL connection established")
-
-        print("🔄 Attempting login...")
-        result = mail.login(email_id, app_password)
-        print(f"✅ Login successful: {result}")
-
-        print("🔄 Selecting mailbox: [Gmail]/All Mail...")
-        result = mail.select('"[Gmail]/All Mail"')
-        print(f"✅ Mailbox selected: {result}")
-
-        # Prepare IMAP search criteria with text search
-        start_imap = _date_to_imap_format(start_date)
-        # Add one day to end_date for BEFORE criteria (IMAP BEFORE is exclusive)
-        end_dt = datetime.strptime(end_date, "%d-%b-%Y") + timedelta(days=1)
-        end_imap = end_dt.strftime("%d-%b-%Y")
-
-        # Search for emails containing the keyword in FROM field or SUBJECT or BODY
-        # We'll search multiple criteria and combine results
-        search_criteria_list = [
-            f'FROM "{keyword}" SINCE "{start_imap}" BEFORE "{end_imap}"',
-            f'SUBJECT "{keyword}" SINCE "{start_imap}" BEFORE "{end_imap}"',
-            f'BODY "{keyword}" SINCE "{start_imap}" BEFORE "{end_imap}"'
-        ]
-
-        all_email_ids = set()
-
-        # Search with multiple criteria to catch emails containing the keyword
-        for search_criteria in search_criteria_list:
-            try:
-                print(f"IMAP search: {search_criteria}")
-                status, data = mail.search(None, search_criteria)
-                if status == 'OK' and data[0]:
-                    email_ids = data[0].split()
-                    all_email_ids.update(email_ids)
-                    print(f"Found {len(email_ids)} emails with this criteria")
-            except Exception as e:
-                print(f"Search criteria failed: {search_criteria}, error: {e}")
-                continue
-
-        print(f"Total unique emails found: {len(all_email_ids)}")
-        scraped_emails = []
-
-        # Process each email
-        for i, email_id in enumerate(all_email_ids):
-            try:
-                print(f"Processing email {i+1}/{len(all_email_ids)}")
-
-                # Fetch email
-                status, msg_data = mail.fetch(email_id, "(RFC822)")
-                if status != 'OK':
-                    continue
-
-                # Parse email
-                msg = message_from_bytes(msg_data[0][1])
-
-                # Extract information
-                subject = msg.get("Subject", "No Subject")
-                from_header = msg.get("From", "Unknown Sender")
-                content = _email_to_clean_text(msg)
-
-                # Check if the keyword is actually present (case-insensitive)
-                keyword_lower = keyword.lower()
-                if not any(keyword_lower in text.lower() for text in [subject, from_header, content]):
-                    continue
-
-                # Parse date
-                date_header = msg.get("Date", "")
-                if date_header:
-                    try:
-                        dt_obj = parsedate_to_datetime(date_header)
-                        # Convert to IST
-                        ist_dt = dt_obj.astimezone(ZoneInfo("Asia/Kolkata"))
-                        email_date = ist_dt.strftime("%d-%b-%Y")
-                        email_time = ist_dt.strftime("%H:%M:%S")
-                    except:
-                        email_date = datetime.today().strftime("%d-%b-%Y")
-                        email_time = "00:00:00"
-                else:
-                    email_date = datetime.today().strftime("%d-%b-%Y")
-                    email_time = "00:00:00"
-
-                # Double-check date range
-                if not _is_date_in_range(email_date, start_date, end_date):
-                    continue
-
-                # Get message ID for deduplication
-                message_id = msg.get("Message-ID", f"missing-{email_id.decode()}")
-
-                scraped_emails.append({
-                    "date": email_date,
-                    "time": email_time,
-                    "subject": subject,
-                    "from": from_header,
-                    "content": content[:2000],  # Limit content length
-                    "message_id": message_id
-                })
-
-            except Exception as e:
-                print(f"Error processing email {email_id}: {e}")
-                continue
-
-        mail.logout()
-
-        # Sort by date (newest first)
-        scraped_emails.sort(key=lambda x: datetime.strptime(f"{x['date']} {x['time']}", "%d-%b-%Y %H:%M:%S"), reverse=True)
-
-        print(f"Successfully processed {len(scraped_emails)} emails containing '{keyword}'")
-        return scraped_emails
-
-    except Exception as e:
-        print(f"Email text search failed: {e}")
-        raise
-
-# Test the scraper
-if __name__ == "__main__":
-    # Test scraping
-    try:
-        emails = scrape_emails_by_text_search(
-            "[email protected]",
-            "01-Jun-2025",
-            "07-Jun-2025"
-        )
-
-        print(f"\nFound {len(emails)} emails:")
-        for email in emails[:3]:  # Show first 3
-            print(f"- {email['date']} {email['time']}: {email['subject']}")
-
-    except Exception as e:
-        print(f"Test failed: {e}")
agentic_implementation/gradio_ag.py
DELETED
@@ -1,102 +0,0 @@
-import gradio as gr
-import json
-from typing import Any, Dict, List, Tuple
-
-from re_act import (
-    get_plan_from_llm,
-    think,
-    act,
-    store_name_email_mapping,
-    extract_sender_info,
-    client,
-)
-from logger import logger  # Assumes logger is configured
-from schemas import PlanStep
-
-# Maintain persistent session results
-session_results: Dict[str, Any] = {}
-
-def respond(
-    message: str,
-    history: List[Tuple[str, str]],
-    system_message: str,
-    max_tokens: int,
-    temperature: float
-) -> str:
-    logger.info("Gradio agent received message: %s", message)
-    full_response = ""
-
-    try:
-        # Step 1: Generate plan
-        plan = get_plan_from_llm(message)
-        logger.debug("Generated plan: %s", plan)
-        full_response += "📌 **Plan**:\n"
-        for step in plan.plan:
-            full_response += f"- {step.action}\n"
-        full_response += "\n"
-
-        results = {}
-
-        # Step 2: Execute steps
-        for step in plan.plan:
-            if step.action == "done":
-                full_response += "✅ Plan complete.\n"
-                break
-
-            should_run, updated_step, user_prompt = think(step, results, message)
-
-            # Ask user for clarification if needed
-            if user_prompt:
-                full_response += f"❓ {user_prompt} (Please respond with an email)\n"
-                return full_response  # wait for user
-
-            if not should_run:
-                full_response += f"⏭️ Skipping `{step.action}`\n"
-                continue
-
-            try:
-                output = act(updated_step)
-                results[updated_step.action] = output
-                full_response += f"🔧 Ran `{updated_step.action}` → {output}\n"
-            except Exception as e:
-                logger.error("Error running action '%s': %s", updated_step.action, e)
-                full_response += f"❌ Error running `{updated_step.action}`: {e}\n"
-                break
-
-        # Step 3: Summarize results
-        try:
-            summary_rsp = client.chat.completions.create(
-                model="gpt-4o-mini",
-                temperature=temperature,
-                max_tokens=max_tokens,
-                messages=[
-                    {"role": "system", "content": "Summarize these results for the user in a friendly way."},
-                    {"role": "assistant", "content": json.dumps(results)}
-                ],
-            )
-            summary = summary_rsp.choices[0].message.content
-            full_response += "\n📋 **Summary**:\n" + summary
-        except Exception as e:
-            logger.error("Summary generation failed: %s", e)
-            full_response += "\n❌ Failed to generate summary."
-
-    except Exception as e:
-        logger.exception("Unhandled error in agent: %s", e)
-        full_response += f"\n❌ Unexpected error: {e}"
-
-    return full_response
-
-
-demo = gr.ChatInterface(
-    respond,
-    additional_inputs=[
-        gr.Textbox(label="System message", value="You are an email assistant agent."),
-        gr.Slider(label="Max tokens", minimum=64, maximum=2048, value=512, step=1),
-        gr.Slider(label="Temperature", minimum=0.0, maximum=1.5, value=0.7, step=0.1),
-    ],
-    title="📬 Email Agent",
-    description="Ask me anything related to your email tasks!"
-)
-
-if __name__ == "__main__":
-    demo.launch()
agentic_implementation/name_mapping.json
DELETED
@@ -1,4 +0,0 @@
-{
-  "dev agarwal": "[email protected]",
-  "axis bank": "[email protected]"
-}
agentic_implementation/re_act.py
DELETED
@@ -1,229 +0,0 @@
-# orchestrator.py
-
-import os
-import json
-import re
-from typing import Any, Dict, Tuple, Optional
-from datetime import datetime
-
-from dotenv import load_dotenv
-from openai import OpenAI
-
-from schemas import Plan, PlanStep, FetchEmailsParams
-from tools import TOOL_MAPPING
-
-# Load .env and initialize OpenAI client
-load_dotenv()
-api_key = os.getenv("OPENAI_API_KEY")
-if not api_key:
-    raise RuntimeError("Missing OPENAI_API_KEY in environment")
-client = OpenAI(api_key=api_key)
-
-# File paths for name mapping
-NAME_MAPPING_FILE = "name_mapping.json"
-
-# === Hard-coded list of available actions ===
-SYSTEM_PLAN_PROMPT = """
-You are an email assistant agent. You have access to the following actions:
-
-• fetch_emails - fetch emails using text search with sender keywords and date extraction (e.g., "swiggy emails last week")
-• show_email - display specific email content
-• analyze_emails - analyze email patterns or content
-• draft_reply - create a reply to an email
-• send_reply - send a drafted reply
-• done - complete the task
-
-When the user gives you a query, output _only_ valid JSON of this form:
-
-{
-  "plan": [
-    "fetch_emails",
-    ...,
-    "done"
-  ]
-}
-
-Rules:
-- Use "fetch_emails" for text-based email search (automatically extracts sender keywords and dates)
-- The final entry _must_ be "done"
-- If no tool is needed, return `{"plan":["done"]}`
-
-Example: For "show me emails from swiggy today" → ["fetch_emails", "done"]
-"""
-
-SYSTEM_VALIDATOR_TEMPLATE = """
-You are a plan validator.
-Context (results so far):
-{context}
-
-Next action:
-{action}
-
-Reply _only_ with JSON:
-{{
-  "should_execute": <true|false>,
-  "parameters": <null or a JSON object with this action's parameters>
-}}
-"""
-
-
-def _load_name_mapping() -> Dict[str, str]:
-    """Load name to email mapping from JSON file"""
-    if not os.path.exists(NAME_MAPPING_FILE):
-        return {}
-    try:
-        with open(NAME_MAPPING_FILE, "r") as f:
-            return json.load(f)
-    except (json.JSONDecodeError, IOError):
-        return {}
-
-
-def _save_name_mapping(mapping: Dict[str, str]):
-    """Save name to email mapping to JSON file"""
-    with open(NAME_MAPPING_FILE, "w") as f:
-        json.dump(mapping, f, indent=2)
-
-
-def store_name_email_mapping(name: str, email: str):
-    """Store new name to email mapping"""
-    name_mapping = _load_name_mapping()
-    name_mapping[name.lower().strip()] = email.lower().strip()
-    _save_name_mapping(name_mapping)
-
-
-def extract_sender_info(query: str) -> Dict:
-    """
-    Extract sender information from user query using LLM
-    """
-    system_prompt = """
-    You are an email query parser that extracts sender information.
-
-    Given a user query, extract the sender intent - the person/entity they want emails from.
-    This could be:
-    - A person's name (e.g., "dev", "john smith", "dev agarwal")
-    - A company/service (e.g., "amazon", "google", "linkedin")
-    - An email address (e.g., "[email protected]")
-
-    Examples:
-    - "emails from dev agarwal last week" → "dev agarwal"
-    - "show amazon emails from last month" → "amazon"
-    - "emails from [email protected] yesterday" → "[email protected]"
-    - "get messages from sarah" → "sarah"
-
-    Return ONLY valid JSON:
-    {
-      "sender_intent": "extracted name, company, or email"
-    }
-    """
-
-    response = client.chat.completions.create(
-        model="gpt-4o-mini",
-        temperature=0.0,
-        messages=[
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": query}
-        ],
-    )
-
-    result = json.loads(response.choices[0].message.content)
-    return result
-
-
-def resolve_sender_email(sender_intent: str) -> Tuple[Optional[str], bool]:
-    """
-    Resolve sender intent to actual email address
-    Returns: (email_address, needs_user_input)
-    """
-    # Check if it's already an email address
-    if "@" in sender_intent:
-        return sender_intent.lower(), False
-
-    # Load name mapping
-    name_mapping = _load_name_mapping()
-
-    # Normalize the intent (lowercase for comparison)
-    normalized_intent = sender_intent.lower().strip()
-
-    # Check direct match
-    if normalized_intent in name_mapping:
-        return name_mapping[normalized_intent], False
-
-    # Check partial matches (fuzzy matching)
-    for name, email in name_mapping.items():
-        if normalized_intent in name.lower() or name.lower() in normalized_intent:
-            return email, False
-
-    # No match found
-    return None, True
-
-
-def get_plan_from_llm(user_query: str) -> Plan:
-    """
-    Ask the LLM which actions to run, in order. No parameters here.
-    """
-    response = client.chat.completions.create(
-        model="gpt-4o-mini",
-        temperature=0.0,
-        messages=[
-            {"role": "system", "content": SYSTEM_PLAN_PROMPT},
-            {"role": "user", "content": user_query},
-        ],
-    )
-
-    plan_json = json.loads(response.choices[0].message.content)
-    steps = [PlanStep(action=a) for a in plan_json["plan"]]
-    return Plan(plan=steps)
-
-
-def think(
-    step: PlanStep,
-    context: Dict[str, Any],
-    user_query: str
-) -> Tuple[bool, Optional[PlanStep], Optional[str]]:
-    """
-    Fill in parameters or skip based on the action:
-    - fetch_emails: pass the raw query for text-based search and date extraction
-    - others: ask the LLM validator for params
-
-    Returns: (should_execute, updated_step, user_prompt_if_needed)
-    """
-    # 1) fetch_emails → pass the full query for text-based search and date extraction
-    if step.action == "fetch_emails":
-        params = FetchEmailsParams(
-            query=user_query  # Pass the full query for keyword and date extraction
-        )
-        return True, PlanStep(action="fetch_emails", parameters=params), None
-
-    # 2) everything else → validate & supply params via LLM
-    prompt = SYSTEM_VALIDATOR_TEMPLATE.format(
-        context=json.dumps(context, indent=2),
-        action=step.action,
-    )
-    response = client.chat.completions.create(
-        model="gpt-4o-mini",
-        temperature=0.0,
-        messages=[
-            {"role": "system", "content": "Validate or supply parameters for this action."},
-            {"role": "user", "content": prompt},
-        ],
-    )
-    verdict = json.loads(response.choices[0].message.content)
-    if not verdict.get("should_execute", False):
-        return False, None, None
-
-    return True, PlanStep(
-        action=step.action,
-        parameters=verdict.get("parameters")
-    ), None
-
-
-def act(step: PlanStep) -> Any:
-    """
-    Dispatch to the actual implementation in tools.py.
-    """
-    fn = TOOL_MAPPING.get(step.action)
-    if fn is None:
-        raise ValueError(f"Unknown action '{step.action}'")
-
-    kwargs = step.parameters.model_dump() if step.parameters else {}
-    return fn(**kwargs)
|
agentic_implementation/requirements_oauth.txt
CHANGED
@@ -1,5 +1,5 @@
 # Core OAuth Gmail MCP Server Dependencies
-gradio
+gradio[mcp]
 google-auth
 google-auth-oauthlib
 google-auth-httplib2
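
The only change here swaps plain `gradio` for the `gradio[mcp]` extra, which installs the dependencies Gradio needs to expose an app as an MCP server (matching the new `app_file` in README.md). A minimal sketch of how an app opts in; the `ping` tool and `demo` object are illustrative, not taken from the server code:

```python
# Sketch: with gradio[mcp] installed, launching with mcp_server=True serves
# the app's functions as MCP tools at /gradio_api/mcp/sse alongside the UI.
import gradio as gr

def ping(text: str) -> str:
    """Echoes its input; the docstring and type hints feed the MCP tool schema."""
    return text

demo = gr.Interface(fn=ping, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch(mcp_server=True)
```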
agentic_implementation/schemas.py
DELETED
@@ -1,47 +0,0 @@
# schemas.py

from pydantic import BaseModel, EmailStr
from typing import List, Literal, Optional, Union


class FetchEmailsParams(BaseModel):
    query: str  # Natural language query with sender and date info (e.g., "show me mails for last week from swiggy")


class ShowEmailParams(BaseModel):
    message_id: str

class AnalyzeEmailsParams(BaseModel):
    emails: List[dict]

class DraftReplyParams(BaseModel):
    email: dict
    tone: Optional[Literal["formal", "informal"]] = "formal"

class SendReplyParams(BaseModel):
    message_id: str
    reply_body: str


ToolParams = Union[
    FetchEmailsParams,
    ShowEmailParams,
    AnalyzeEmailsParams,
    DraftReplyParams,
    SendReplyParams
]

class PlanStep(BaseModel):
    action: Literal[
        "fetch_emails",
        "show_email",
        "analyze_emails",
        "draft_reply",
        "send_reply",
        "done",
    ]
    parameters: Optional[ToolParams] = None

class Plan(BaseModel):
    plan: List[PlanStep]
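
These models pinned every LLM-produced plan to a fixed action vocabulary before anything executed. A short sketch of the round-trip they enabled; the JSON payload is a made-up example, and `model_validate_json` assumes Pydantic v2, consistent with the `model_dump()` call in re_act.py:

```python
# Validating a raw LLM plan against the deleted schemas (illustrative input).
from schemas import Plan, FetchEmailsParams

raw = '{"plan": [{"action": "fetch_emails"}, {"action": "done"}]}'
plan = Plan.model_validate_json(raw)  # raises ValidationError on unknown actions
plan.plan[0].parameters = FetchEmailsParams(query="emails from swiggy today")
```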
agentic_implementation/tools.py
DELETED
@@ -1,244 +0,0 @@
from schemas import (
    FetchEmailsParams,
    ShowEmailParams,
    AnalyzeEmailsParams,
    DraftReplyParams,
    SendReplyParams,
)
from typing import Any, Dict, List
from email_scraper import scrape_emails_by_text_search, _load_email_db, _save_email_db, _is_date_in_range
from datetime import datetime, timedelta
from openai import OpenAI
import json
from dotenv import load_dotenv
import os

# Load environment variables from .env file
load_dotenv()

# Initialize OpenAI client
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=OPENAI_API_KEY)


def extract_query_info(query: str) -> Dict[str, str]:
    """
    Use an LLM to extract sender information and date range from a user query.
    Returns {"sender_keyword": "company/sender name", "start_date": "DD-MMM-YYYY", "end_date": "DD-MMM-YYYY"}.
    """
    today_str = datetime.today().strftime("%d-%b-%Y")
    five_days_ago = (datetime.today() - timedelta(days=5)).strftime("%d-%b-%Y")

    system_prompt = f"""
You are a query parser for email search. Today is {today_str}.

Given a user query, extract the sender/company keyword and date range. Return _only_ valid JSON with:
{{
    "sender_keyword": "keyword or company name to search for",
    "start_date": "DD-MMM-YYYY",
    "end_date": "DD-MMM-YYYY"
}}

Rules:
1. Extract sender keywords from phrases like "from swiggy", "swiggy emails", "mails from amazon", etc.
2. If no time is mentioned, use last 5 days: {five_days_ago} to {today_str}
3. Interpret relative dates as:
   - "today" → {today_str} to {today_str}
   - "yesterday" → 1 day ago to 1 day ago
   - "last week" → 7 days ago to {today_str}
   - "last month" → 30 days ago to {today_str}
   - "last N days" → N days ago to {today_str}

Examples:
- "show me mails for last week from swiggy"
  → {{"sender_keyword": "swiggy", "start_date": "01-Jun-2025", "end_date": "{today_str}"}}
- "emails from amazon yesterday"
  → {{"sender_keyword": "amazon", "start_date": "06-Jun-2025", "end_date": "06-Jun-2025"}}
- "show flipkart emails"
  → {{"sender_keyword": "flipkart", "start_date": "{five_days_ago}", "end_date": "{today_str}"}}

Return _only_ the JSON object—no extra text.
"""

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": query}
    ]
    resp = client.chat.completions.create(
        model="gpt-4o-mini",
        temperature=0.0,
        messages=messages
    )
    content = resp.choices[0].message.content.strip()

    # Try direct parse; if the model added fluff, strip to the JSON block.
    try:
        return json.loads(content)
    except json.JSONDecodeError:
        start = content.find("{")
        end = content.rfind("}") + 1
        return json.loads(content[start:end])


def fetch_emails(query: str) -> Dict:
    """
    Fetch emails based on a natural language query that contains sender information and date range.
    Now uses text-based search and returns only summary information, not full content.

    Args:
        query: The natural language query (e.g., "show me mails for last week from swiggy")

    Returns:
        Dict with query_info, email_summary, analysis, and email_count
    """
    # Extract sender keyword and date range from query
    query_info = extract_query_info(query)
    sender_keyword = query_info.get("sender_keyword", "")
    start_date = query_info.get("start_date")
    end_date = query_info.get("end_date")

    print(f"Searching for emails with keyword '{sender_keyword}' between {start_date} and {end_date}")

    # Use the new text-based search function
    full_emails = scrape_emails_by_text_search(sender_keyword, start_date, end_date)

    if not full_emails:
        return {
            "query_info": query_info,
            "email_summary": [],
            "analysis": {"summary": f"No emails found for '{sender_keyword}' in the specified date range.", "insights": []},
            "email_count": 0
        }

    # Create summary version without full content
    email_summary = []
    for email in full_emails:
        summary_email = {
            "date": email.get("date"),
            "time": email.get("time"),
            "subject": email.get("subject"),
            "from": email.get("from", "Unknown Sender"),
            "message_id": email.get("message_id")
            # Note: Removed 'content' to keep response clean
        }
        email_summary.append(summary_email)

    # Auto-analyze the emails for insights
    analysis = analyze_emails(full_emails)  # Use full emails for analysis but don't return them

    # Return summary info with analysis
    return {
        "query_info": query_info,
        "email_summary": email_summary,
        "analysis": analysis,
        "email_count": len(full_emails)
    }


def show_email(message_id: str) -> Dict:
    """
    Retrieve the full email record (date, time, subject, content, etc.)
    from the local cache by message_id.
    """
    db = _load_email_db()  # returns { sender_email: { "emails": [...], "last_scraped": ... }, ... }

    # Search each sender's email list
    for sender_data in db.values():
        for email in sender_data.get("emails", []):
            if email.get("message_id") == message_id:
                return email

    # If we didn't find it, raise or return an error structure
    raise ValueError(f"No email found with message_id '{message_id}'")


def draft_reply(email: Dict, tone: str) -> str:
    # call LLM to generate reply
    # return a dummy reply for now
    # (cached email records carry "message_id", not "id")
    print(f"Drafting reply for email {email.get('message_id')} with tone: {tone}")
    return f"Drafted reply for email {email.get('message_id')} with tone {tone}."


def send_reply(message_id: str, reply_body: str) -> Dict:
    # SMTP / Gmail API send
    print(f"Sending reply to message {message_id} with body: {reply_body}")
    ...


def analyze_emails(emails: List[Dict]) -> Dict:
    """
    Summarize and extract insights from a list of emails.
    Returns a dict with this schema:
    {
        "summary": str,          # a concise overview of all emails
        "insights": [str, ...]   # list of key observations or stats
    }
    """
    if not emails:
        return {"summary": "No emails to analyze.", "insights": []}

    # 1) Create a simplified email summary for analysis (without full content)
    simplified_emails = []
    for email in emails:
        simplified_email = {
            "date": email.get("date"),
            "time": email.get("time"),
            "subject": email.get("subject"),
            "from": email.get("from", "Unknown Sender"),
            "content_preview": email.get("content", "")[:200] + "..." if email.get("content") else ""
        }
        simplified_emails.append(simplified_email)

    emails_payload = json.dumps(simplified_emails, ensure_ascii=False)

    # 2) Build the LLM prompt
    system_prompt = """
You are an expert email analyst. You will be given a JSON array of email objects,
each with keys: date, time, subject, from, content_preview.

Your job is to produce _only_ valid JSON with two fields:
1. summary: a 1–2 sentence high-level overview of these emails.
2. insights: a list of 3–5 bullet-style observations or statistics
   (e.g. "5 emails from Swiggy", "mostly promotional content", "received over 3 days").

Focus on metadata like senders, subjects, dates, and patterns rather than detailed content analysis.

Output exactly:

{
    "summary": "...",
    "insights": ["...", "...", ...]
}
"""
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Here are the emails:\n{emails_payload}"}
    ]

    # 3) Call the LLM
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        temperature=0.0,
        messages=messages
    )

    # 4) Parse and return
    content = response.choices[0].message.content.strip()
    try:
        return json.loads(content)
    except json.JSONDecodeError:
        # In case the model outputs extra text, extract the JSON block
        start = content.find('{')
        end = content.rfind('}') + 1
        return json.loads(content[start:end])


TOOL_MAPPING = {
    "fetch_emails": fetch_emails,
    "show_email": show_email,
    "analyze_emails": analyze_emails,
    "draft_reply": draft_reply,
    "send_reply": send_reply,
}
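
`TOOL_MAPPING` is the table that `act()` in re_act.py dispatched through by action name. An illustrative call through it; the query string is just an example:

```python
# Dispatch by name, exactly as act() did with step.parameters.model_dump().
result = TOOL_MAPPING["fetch_emails"](query="show me mails for last week from swiggy")
print(result["email_count"], result["analysis"]["summary"])
```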
app.py
DELETED
@@ -1,64 +0,0 @@
import gradio as gr
from huggingface_hub import InferenceClient

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]

    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    messages.append({"role": "user", "content": message})

    response = ""

    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = message.choices[0].delta.content or ""  # guard: the final stream chunk can carry no content

        response += token
        yield response


"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)


if __name__ == "__main__":
    demo.launch()
client/main.py
DELETED
@@ -1,183 +0,0 @@
#!/usr/bin/env python3

import requests
import sys
from typing import Dict, Any

API_BASE = "http://127.0.0.1:8000/api/v1"

class EmailQueryCLI:
    def __init__(self):
        self.session = requests.Session()

    def check_connection(self) -> bool:
        """Check if API server is running"""
        try:
            response = self.session.get(f"{API_BASE}/health")
            response.raise_for_status()
            return True
        except requests.RequestException:  # narrowed from a bare except
            return False

    def pretty_print_email(self, email: Dict) -> str:
        """Format email for display"""
        return f"""
📧 {email['subject']}
📅 {email['date']} {email['time']}
💬 {email['content'][:200]}...
🆔 {email['message_id'][:20]}...
{"─" * 60}"""

    def handle_query(self, query: str):
        """Handle a natural language query"""
        print(f"\n🔍 Processing: '{query}'")

        try:
            # Try to get emails directly
            response = self.session.post(
                f"{API_BASE}/get_emails",
                json={"query": query}
            )

            if response.status_code == 200:
                data = response.json()
                self.display_email_results(data)
                return True

            elif response.status_code == 400:
                error_detail = response.json()["detail"]

                # Check if we need email mapping
                if isinstance(error_detail, dict) and error_detail.get("type") == "need_email_input":
                    mapping_success = self.handle_missing_mapping(error_detail)
                    if mapping_success and hasattr(self, '_retry_query'):
                        # Retry the query after successful mapping
                        print(f"🔄 Retrying query...")
                        delattr(self, '_retry_query')
                        return self.handle_query(query)  # Recursive retry
                    return mapping_success
                else:
                    print(f"❌ Error: {error_detail}")
                    return False
            else:
                print(f"❌ API Error: {response.status_code}")
                return False

        except Exception as e:
            print(f"❌ Connection Error: {e}")
            return False

    def handle_missing_mapping(self, error_detail: Dict) -> bool:
        """Handle case where email mapping is needed"""
        sender_intent = error_detail["sender_intent"]
        print(f"\n❓ {error_detail['message']}")

        try:
            email = input(f"📧 Enter email for '{sender_intent}': ").strip()
            if not email or "@" not in email:
                print("❌ Invalid email address")
                return False

            # Add the mapping
            mapping_response = self.session.post(
                f"{API_BASE}/add_email_mapping",
                json={"name": sender_intent, "email": email}
            )

            if mapping_response.status_code == 200:
                print(f"✅ Mapping saved: '{sender_intent}' → '{email}'")
                self._retry_query = True  # Flag to retry the original query
                return True
            else:
                print(f"❌ Failed to save mapping: {mapping_response.text}")
                return False

        except KeyboardInterrupt:
            print("\n❌ Cancelled")
            return False

    def display_email_results(self, data: Dict):
        """Display email search results"""
        print(f"\n✅ Found {data['total_emails']} emails")
        print(f"📤 From: {data['resolved_email']}")
        print(f"📅 Period: {data['start_date']} to {data['end_date']}")

        if data['emails']:
            print(f"\n📧 Emails:")
            for email in data['emails'][:10]:  # Show first 10
                print(self.pretty_print_email(email))

            if len(data['emails']) > 10:
                print(f"\n... and {len(data['emails']) - 10} more emails")
        else:
            print("\n📭 No emails found in this date range")

    def show_mappings(self):
        """Display all stored name-to-email mappings"""
        try:
            response = self.session.get(f"{API_BASE}/view_mappings")
            if response.status_code == 200:
                data = response.json()
                mappings = data["mappings"]

                print(f"\n📇 Stored Mappings ({len(mappings)}):")
                if mappings:
                    for name, email in mappings.items():
                        print(f"  👤 {name} → 📧 {email}")
                else:
                    print("  (No mappings stored)")
            else:
                print(f"❌ Failed to load mappings: {response.text}")
        except Exception as e:
            print(f"❌ Error: {e}")

    def run(self):
        """Main CLI loop"""
        if not self.check_connection():
            print("❌ Cannot connect to API server at http://127.0.0.1:8000")
            print("   Make sure to run: uvicorn main:app --reload")
            sys.exit(1)

        print("✅ Connected to Email Query System")
        print("💡 Try queries like:")
        print("   • 'emails from john last week'")
        print("   • 'show amazon emails from last month'")
        print("   • 'get [email protected] emails yesterday'")
        print("\n📋 Commands:")
        print("   • 'mappings' - View stored name-to-email mappings")
        print("   • 'quit' or Ctrl+C - Exit")
        print("=" * 60)

        while True:
            try:
                query = input("\n🗨️  You: ").strip()

                if not query:
                    continue

                if query.lower() in ['quit', 'exit', 'q']:
                    break
                elif query.lower() in ['mappings', 'map', 'm']:
                    self.show_mappings()
                elif query.lower() in ['help', 'h']:
                    print("\n💡 Examples:")
                    print("   • emails from amazon last 5 days")
                    print("   • show john smith emails this week")
                    print("   • get notifications from google yesterday")
                else:
                    self.handle_query(query)

            except KeyboardInterrupt:
                break
            except Exception as e:
                print(f"❌ Unexpected error: {e}")

        print("\n👋 Goodbye!")

def main():
    """Entry point for CLI"""
    cli = EmailQueryCLI()
    cli.run()

if __name__ == "__main__":
    main()
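
The CLI above codes against a small REST contract (`/health`, `/get_emails`, `/add_email_mapping`, `/view_mappings` under `/api/v1`) served by the FastAPI app deleted in this same commit (`server/main.py` and `server/routes.py`). A hypothetical sketch of that contract's shape with stub bodies; beyond the paths themselves, everything here is assumed:

```python
# Assumed reconstruction of the server contract client/main.py expects;
# the real, deleted implementation persisted mappings to name_mapping.json.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class Mapping(BaseModel):
    name: str
    email: str

@app.get("/api/v1/health")
def health() -> dict:
    return {"status": "ok"}

@app.post("/api/v1/add_email_mapping")
def add_email_mapping(m: Mapping) -> dict:
    return {"saved": {m.name: m.email}}  # stub; real version wrote to disk

@app.get("/api/v1/view_mappings")
def view_mappings() -> dict:
    return {"mappings": {}}  # stub
```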
requirements.txt
DELETED
@@ -1,11 +0,0 @@
huggingface_hub==0.25.2
uvicorn
fastapi
openai
requests
python-dateutil
beautifulsoup4
python-dotenv
pydantic[email]
gradio
loguru
server/email_db.json
DELETED
@@ -1,135 +0,0 @@
{
  "[email protected]": {
    "emails": [
      {
        "date": "01-Jan-2025",
        "time": "11:00:26",
        "subject": "2025: A Year to Unite, Innovate, and Lead with Integrity: Let's make\r\n IIT Jodhpur a beacon of excellence and sustainability!",
        "content": "Dear Members of the IIT Jodhpur Fraternity, Students, Staff Members, and Faculty Colleagues, As we stand on the brink of a new year, I take this opportunity to reflect on\u00a0the remarkable journey of IIT Jodhpur so far and the collective efforts that\u00a0have brought us to where we are today over the last\u00a016\u00a0years or so. Each one of you - our faculty,\u00a0students, staff, alumni, and partners, has played a vital role in shaping\u00a0this institute and its evolution as a hub of learning, innovation, and excellence. It is\u00a0your commitment, resilience, and unwavering dedication that inspires all of us to aim higher and envision a future of much greater achievements,\u00a0breaking the\u00a0moulds of conventional thinking and defining excellence at a new\u00a0level. The year 2025 holds immense promise. It is a year when our shared vision of growth, innovation, and sustainability will take centre stage. Our institution stands at the confluence of tradition and modernity, of\u00a0ambition and responsibility. Let this be the year where we align our\u00a0efforts to not only push the boundaries of research and education but\u00a0also embrace a balanced approach toward sustainability in every aspect. Together, we can make IIT Jodhpur a beacon of innovation,\u00a0collaboration, and societal impact.\u00a0Our strength lies in our unity. As a diverse and dynamic community, we\u00a0can bring together varied perspectives, talents, and\u00a0disciplines to create something extraordinary. It is through this synergy that we will achieve the breakthroughs needed to address global\u00a0challenges and contribute meaningfully to the world. Let us continue to\u00a0foster an environment of mutual respect, inclusivity, and shared\u00a0purpose, ensuring that every member of our community feels\u00a0empowered to contribute their best. The journey ahead requires us to focus on strengthening collaborations\u00a0that amplify our impact. By working closely with industry, academia, and\u00a0governmental organizations, we can create pathways to meaningful\u00a0innovation and transformative research. O",
        "message_id": "<CADCv5Wjqd09OCsR0fT2YXv5zzyJT=+xsuvsu4rhLTKbDBznYNw@mail.gmail.com>"
      },
      {
        "date": "14-Jan-2025",
        "time": "09:52:47",
        "subject": "Makar Sankranti, Lohri, Bihu, Pongal: Festivals of Progress and New Aspirations",
        "content": "Dear Members of the IIT Jodhpur Fraternity, \u0906\u092a\u0915\u094b \u0932\u094b\u0939\u093f\u095c\u0940, \u092c\u093f\u0939\u0942, \u092a\u094b\u0902\u0917\u0932 \u0914\u0930 \u092e\u0915\u0930 \u0938\u0902\u0915\u094d\u0930\u093e\u0902\u0924\u093f \u0915\u0940 \u0939\u093e\u0930\u094d\u0926\u093f\u0915 \u0936\u0941\u092d\u0915\u093e\u092e\u0928\u093e\u090f\u0901! \u092d\u093e\u0938\u094d\u0915\u0930\u0938\u094d\u092f \u092f\u0925\u093e \u0924\u0947\u091c\u094b \u092e\u0915\u0930\u0938\u094d\u0925\u0938\u094d\u092f \u0935\u0930\u094d\u0927\u0924\u0947\u0964 \u0924\u0925\u0948\u0935 \u092d\u0935\u0924\u093e\u0902 \u0924\u0947\u091c\u094b \u0935\u0930\u094d\u0927\u0924\u093e\u092e\u093f\u0924\u093f \u0915\u093e\u092e\u092f\u0947\u0964\u0964 \u092d\u0917\u0935\u093e\u0928 \u0938\u0942\u0930\u094d\u092f \u0915\u0947 \u092e\u0915\u0930 \u0930\u093e\u0936\u093f \u092e\u0947\u0902 \u092a\u094d\u0930\u0935\u0947\u0936 \u0924\u0925\u093e \u0909\u0924\u094d\u0924\u0930\u093e\u092f\u0923 \u0939\u094b\u0928\u0947 \u092a\u0930 \u0938\u0902\u092a\u0942\u0930\u094d\u0923 \u092d\u093e\u0930\u0924\u0935\u0930\u094d\u0937 \u092e\u0947\u0902 \u090a\u0930\u094d\u091c\u093e \u0935 \u0909\u0937\u094d\u092e\u093e \u092e\u0947\u0902 \u0935\u0943\u0926\u094d\u0927\u093f \u0915\u0947 \u092a\u094d\u0930\u0924\u0940\u0915-\u092a\u0930\u094d\u0935 \u00a0\"\u092e\u0915\u0930 \u0938\u0902\u0915\u094d\u0930\u093e\u0902\u0924\u093f\" \u092a\u0930 \u0906\u092a\u0915\u093e \u090f\u0935\u0902 \u0906\u092a\u0915\u0947 \u092a\u0930\u093f\u0935\u093e\u0930 \u092e\u0947\u0902 \u0938\u092d\u0940 \u0915\u093e \u091c\u0940\u0935\u0928 \u0905\u0924\u094d\u092f\u0902\u0924 \u092a\u094d\u0930\u0915\u093e\u0936\u092e\u093e\u0928 \u0939\u094b! \u0906\u092a \u0938\u092d\u0940 \u0938\u094d\u0935\u0938\u094d\u0925 \u0930\u0939\u0947\u0902, \u092a\u094d\u0930\u0938\u0928\u094d\u0928 \u0930\u0939\u0947\u0902 \u0914\u0930 \u0938\u0942\u0930\u094d\u092f \u0915\u0940 \u092d\u093e\u0901\u0924\u093f \u0905\u092a\u0928\u0947 \u092a\u094d\u0930\u0915\u093e\u0936 \u0938\u0947 \u0935\u093f\u0936\u094d\u0935 \u0915\u094b \u0906\u0932\u094b\u0915\u093f\u0924 \u0915\u0930\u0947\u0902! \u0906\u0907\u090f, \u0905\u092a\u0928\u0940 \u0906\u0932\u094b\u0915\u0927\u0930\u094d\u092e\u0940 \u0938\u0902\u0938\u094d\u0915\u0943\u0924\u093f \u0915\u0940 \u0935\u093f\u0930\u093e\u0938\u0924 \u0915\u0947 \u0935\u093e\u0939\u0915 \u092c\u0928\u0947\u0902\u0964 \u0909\u0938\u0915\u0947 \u092e\u0939\u0924\u094d\u0924\u094d\u0935 \u090f\u0935\u0902 \u0935\u0948\u091c\u094d\u091e\u093e\u0928\u093f\u0915\u0924\u093e \u0915\u094b \u092a\u0939\u0932\u0947 \u0938\u094d\u0935\u092f\u0902 \u0938\u092e\u091d\u0947\u0902, \u092b\u093f\u0930 \u0905\u092a\u0928\u0940 \u0938\u0902\u0924\u0924\u093f\u092f\u094b\u0902 \u0915\u094b \u092d\u0940 \u0938\u092e\u091d\u093e\u090f\u0901\u0964 \u0939\u092e\u093e\u0930\u0947 \u0924\u094d\u092f\u094b\u0939\u093e\u0930, \u0939\u092e\u093e\u0930\u0940 \u092d\u093e\u0937\u093e, \u0939\u092e\u093e\u0930\u0940 \u092a\u0930\u0902\u092a\u0930\u093e, \u0939\u092e\u093e\u0930\u0940 \u0938\u0902\u0938\u094d\u0915\u0943\u0924\u093f - \u0939\u0940\u0928\u0924\u093e \u0928\u0939\u0940\u0902, \u0917\u0930\u094d\u0935 \u0915\u0940 \u0935\u093f\u0937\u092f\u0935\u0938\u094d\u0924\u0941 \u0939\u0948\u0902\u0964 As we celebrate the auspicious occasion of Makar Sankranti, when the sun begins its northward journey (Uttarayan), let us draw inspiration from this symbol of progress, renewal, and growth. This festival reminds us to embrace change, rise above challenges, and strive for new aspirations. Much like the sun\u2019s steady path, our commitment to advancing knowledge, innovation, and societal impact continues to illuminate the way forward. At IIT Jodhpur, we are driven to explore transformative solutions, foster excellence, and shape a sustainable and inclusive future for ourselves, and the nation. Let us make use of this occasion to reflect on our achievements and renew our dedication to the goals that lie ahead. Together, as a community, we can reach greater heights and leave an enduring legacy for generations to come. May this festive season bring joy, prosperity, and inspiration to you and your families. Let us soar higher, united in our purpose and vision. -- Affectionately Yours..... With warm regards..... Prof. Avinash Kumar Agarwal, FTWAS, FAAAS, FCI, FSAE, FASME, FRSC, FNAE, FNASc, FISEES Director, IIT Jodhpur & Sir J C Bose National Fellow Tel: +91 291 2801011 (Off) Wikipedia: tinyurl.com/bdhe89ew | Scopus: https://tinyurl.com/mwccdcc4 | Google Scholar: https://tinyurl.com/mtbyv7w4 | FUE",
        "message_id": "<CADCv5WgfRV2jFQf2=gfVHw1xfyE87tdqHSVtGQq3S4dhNmwEdA@mail.gmail.com>"
      },
      {
        "date": "25-Jan-2025",
        "time": "19:48:33",
        "subject": "Happy Republic Day-2025",
        "content": "My Dear Students, Faculty and Staff members, \ud83c\uddee\ud83c\uddf3 Greetings on the occasion of the 76th Republic Day! \ud83c\uddee\ud83c\uddf3 As we approach the 26th of January, we unite to celebrate not only the adoption of our Constitution but also the enduring principles of democracy, justice, and equality that define us as individuals and as an institution. This momentous day inspires us to reaffirm our collective commitment to shaping the future of our nation. At IIT Jodhpur, we hold a pivotal role in this journey of progress and innovation. As proud members of this esteemed institution, we bear the responsibility of fostering a culture rooted in innovation, academic excellence, and ethical leadership. Republic Day serves as a powerful reminder of our individual and collective contributions to IIT Jodhpur and the nation\u2014 not only through our collective professional accomplishments but also through the values we instil in our students and the spirit of collaboration and excellence we cultivate among ourselves. On this Republic Day, let us focus on: \u2705 Strengthening research and teaching excellence in our Institute, \u2705 Enhancing our infrastructure, and \u2705 Building a more inclusive and supportive environment for all members of our community. It is through these efforts that we will continue to push the frontiers of knowledge, innovation and excellence, contributing meaningfully to our nation and beyond. I encourage everyone to actively participate in the Republic Day celebrations tomorrow morning and reflect on how we can collectively elevate IIT Jodhpur\u2019s legacy. Together, let us uphold the values of integrity, diversity, and excellence\u2014the core pillars of our nation and our Institute. Wishing you and yours a thoughtful, inspiring, and joyous Republic Day 2025. Jai Hind. Jai Bharat. With warm regards and affection, Prof. Avinash Kumar Agarwal, FTWAS, FAAAS, FCI, FSAE, FASME, FRSC, FNAE, FNASc, FISEES Director, IIT Jodhpur & Sir J C Bose National Fellow Tel: +91 291 2801011 (Off) Wikipedia: tinyurl.com/bd",
        "message_id": "<CADCv5WiULZvioxbVYrmR7mdmZHtB4jQ2P2cG_+S-Sdas1hNyew@mail.gmail.com>"
      },
      {
        "date": "13-Feb-2025",
        "time": "11:37:07",
        "subject": "=?UTF-8?Q?Re=3A_=5Bfaculty=5D_Invitation_to_Hamira_Manganiyar_Group=27?=\r\n\t=?UTF-8?Q?s_Rajasthani_Folk_Music_Performance_Today_=E2=80=93_VIRASAT_2025?=",
        "content": "Dear All, The Institute is organising Virasat 2025, and renowned artists will descend on our campus over the next five days. We must take this opportunity to learn about our cultural heritage and musical\u00a0performances during this period. I will strongly encourage all constituents of our campus community, including students, faculty and staff members and their families, and project staff members, to attend all these programs in the evening over the next five days and enjoy the cultural performances. Best wishes Avinash Kumar Agarwal On Thu, Feb 13, 2025 at 11:26\u202fAM Sherin Sabu < [email protected] > wrote: Dear all, We are delighted to invite you to an enchanting evening of Rajasthani folk music as part of VIRASAT 2025 , organized by IIT Jodhpur in collaboration with SPIC MACAY. \ud83c\udfb6 Performance Details: \ud83d\udccd Venue: Jodhpur Club, IIT Jodhpur \ud83c\udfa4 Artist: Hamira Manganiyar Group (Rajasthani Folk Music) \ud83d\udcc5 Date: Today: 13th February 2025 \u23f0 Time: 7:30 PM Immerse yourself in the vibrant and soulful rhythms of Rajasthan as the Hamira Manganiyar Group brings to life the rich musical traditions of the desert. This performance is a rare opportunity to experience the deep-rooted heritage of folk music passed down through generations. We warmly invite you to join us for this unforgettable musical evening.\u00a0\r\n\r\nPlease bring your family along to share in this cultural celebration! \ud83d\udccc Find attached the official event poster for more details. Looking forward to your presence! Warm Regards, Team Virasat 2025 IIT Jodhpur -- Dr Sherin Sabu Assistant Professor (Sociology), School of Liberal Arts (SoLA) Affiliate Faculty, Center for Emerging Technologies for Sustainable Development (CETSD) IIT Jodhpur",
        "message_id": "<CADCv5WiZNk6BBrYPXhaaN0K_9N-L27ufUfg5JyWTrrJdbnwM=w@mail.gmail.com>"
      },
      {
        "date": "26-Feb-2025",
        "time": "19:54:24",
        "subject": "Greetings on Mahashivratri!",
        "content": "Dear all, Wishing you all a blessed and joyous Mahashivratri! I extend my warmest greetings to all of you and your family members. Mahashivratri is a time of deep spiritual reflection, inner growth, and devotion. This sacred festival symbolizes the triumph of wisdom, devotion, and inner strength, inspiring us to pursue knowledge and morality in all our endeavors. As we celebrate this day with devotion and reflection, let us also reaffirm our commitment to excellence, innovation, and the collective growth of IIT Jodhpur. Together, through dedication and hard work, we should continue to make meaningful contributions to knowledge, technology, and society. With warm regards, Prof. Avinash Kumar Agarwal ..",
        "message_id": "<CADCv5WhSG91tOjiv_+XUUrxvqeOQv4xMocQNsgAC_EuUTQ87jw@mail.gmail.com>"
      },
      {
        "date": "28-Feb-2025",
        "time": "12:05:48",
        "subject": "Re: [faculty] Invitation to celebrate \"National Science Day\" on 28th\r\n February 2025 at IIT Jodhpur",
        "content": "Dear All, Hearty Congratulations to all of you on the occasion of National Science Day 2025. I urge all of you to attend this celebration of National Science Day. Sh Sharad Sarraf, BoG Chairman of IIT Mumbai and Jammu, is the Speaker and the chief guest. He is a strong well-wisher of IIT Jodhpur and we will enrich ourselves by listening to his words, full of wisdom. Best regards Avinash Kumar Agarwal On Wed, Feb 26, 2025 at 6:02\u202fPM Committee for Celebration of Commemorative Days < [email protected] > wrote: Dear All, \u092e\u0939\u093e\u0936\u093f\u0935\u0930\u093e\u0924\u094d\u0930\u093f\u00a0 \u0915\u0940 \u0939\u093e\u0930\u094d\u0926\u093f\u0915 \u0936\u0941\u092d\u0915\u093e\u092e\u0928\u093e\u090f\u0902 / Happy MahaShivratri....! \u0938\u094d\u092e\u093e\u0930\u0915 \u0926\u093f\u0935\u0938 \u0938\u092e\u093e\u0930\u094b\u0939 \u0938\u092e\u093f\u0924\u093f (\u0938\u0940\u0938\u0940\u0938\u0940\u0921\u0940) \u0915\u0940 \u0913\u0930 \u0938\u0947, \u092d\u093e\u0930\u0924\u0940\u092f \u092a\u094d\u0930\u094c\u0926\u094d\u092f\u094b\u0917\u093f\u0915\u0940 \u0938\u0902\u0938\u094d\u0925\u093e\u0928 \u091c\u094b\u0927\u092a\u0941\u0930 \u092e\u0947\u0902 28 \u092b\u0930\u0935\u0930\u0940, 2025 (\u0936\u0941\u0915\u094d\u0930\u0935\u093e\u0930) \u0915\u094b \u0930\u093e\u0937\u094d\u091f\u094d\u0930\u0940\u092f \u0935\u093f\u091c\u094d\u091e\u093e\u0928 \u0926\u093f\u0935\u0938\u00a0 2025 \u0915\u0947 \u0905\u0935\u0938\u0930 \u092a\u0930 \u0939\u092e \u0906\u092a\u0915\u094b \u00a0\u0939\u093e\u0930\u094d\u0926\u093f\u0915 \u0928\u093f\u092e\u0902\u0924\u094d\u0930\u0923 \u0926\u0947\u0924\u0947\u00a0 \u0939\u0948\u0902 \u0964 \u0939\u092e\u00a0\u00a0\u0907\u0938 \u092e\u0939\u0924\u094d\u0935\u092a\u0942\u0930\u094d\u0923 \u0915\u093e\u0930\u094d\u092f\u0915\u094d\u0930\u092e \u092e\u0947\u0902 \u0906\u092a\u0915\u0947 \u0936\u093e\u092e\u093f\u0932 \u0939\u094b\u0928\u0947 \u0915\u0947 \u0938\u092e\u094d\u092e\u093e\u0928 \u0915\u0940 \u0909\u0924\u094d\u0938\u0941\u0915\u0924\u093e \u0938\u0947 \u092a\u094d\u0930\u0924\u0940\u0915\u094d\u0937\u093e \u0915\u0930\u0947\u0902\u0917\u0947\u0964 On behalf of the Committee for Celebration of Commemorative Days (CCCD) at IIT Jodhpur, we cordially invite you to join us in commemorating the National Science Day 2025 on February 28, 2025 (Friday) . We eagerly anticipate the honour of having you at this momentous event. Program: National Science Day 2025 Date: 28th February 2025 (Friday) Venue: Jodhpur Club Time: 5:45 PM onwards Program details: Time Event 05:45 \u2013 06:05 PM Scientific Demonstration & Tea -Refreshments 06:05 \u2013 06:10 PM Lamp Lighting & felicitation 06:10 \u2013 06:20 PM Welcome address by the Director 06:20 \u2013 06:45 PM Talk and interaction by the Chief Guest 06:45 \u2013 06:50 PM Felicitation of Guests 06:50 \u2013 07:00 PM Library App Release 07:00 \u2013 07:05 PM Quiz Session 07:05 \u2013 07:15 PM Felicitation to ACAC students 07:15 \u2013 07:20 PM Vote of Thanks 07:20 PM National Anthem Your presence and active participation will contribute significantly to the success of this celebration. With warm regards, Himmat Singh Assistant Registrar ___________________________________________________ \u0938\u094d\u092e\u093e\u0930\u0915 \u0926\u093f\u0935\u0938 \u0938\u092e\u093e\u0930\u094b\u0939 \u0938\u092e\u093f\u0924\u093f / Committee for Celebration of Commemorative Days (CCCD) \u0906\u0908\u0906\u0908",
        "message_id": "<CADCv5WjLQTBkWxUB7XuOmQEKmNJftW5Orw8rnnjX-cAQ4bPFuw@mail.gmail.com>"
      },
      {
        "date": "01-Apr-2025",
        "time": "10:53:52",
        "subject": "Fwd: FY Closure and Updates",
        "content": "Dear Students, Today, April 1, marks an important day because we are taking two important steps in the direction of our evolution as a mature institute of higher learning. 1. Today, our Health Center starts working in autonomous mode, managed and operated by OUR OWN team. 2. Today, our transport services also start operating in autonomous mode, managed and operated by OUR OWN team. These two are definitely two big steps in the evolution of our institute. I would also like to put on record my deep appreciation of the teams of the Health Center, led by Prof. Anil Tiwari and Dr Neha Sharma, and the transport team, led by Prof. Shree Prakash Tiwari and Sandeep Chandel. Please join me in congratulating them for a good start. While these are big transitions, it is\u00a0possible that there might be some perturbations in services in the initial period. Please give your feedback to the process owners, and actions will be taken to minimise the inconveniences and meet all genuine expectations. Best regards -- With warm regards..... Prof. Avinash Kumar Agarwal, FTWAS, FAAAS, FCI, FSAE, FASME, FRSC, FNAE, FNASc, FISEES Director, IIT Jodhpur & Sir J C Bose National Fellow Tel: +91 291 2801011 (Off) Wikipedia: tinyurl.com/bdhe89ew | Scopus: https://tinyurl.com/mwccdcc4 | Google Scholar: https://tinyurl.com/mtbyv7w4 | FUEL: https://tinyurl.com/bdzn4r28 | Orcid: https://tinyurl.com/537m3tad ------------------------------ ------------------------------ ---------------- \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of The World Academy of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Combustion Institute, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Association for the Advancement of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Society of Mechanical Engineers \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Society of Automotive Engineers International, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of World Society for Sustainable Energy Technologies, UK \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Royal Society of Chemistry, UK \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of National Academy of Sciences India \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Indian National Academy of Engineering \u2022\u00a0 \u00a0 \u00a0 \u00a0F",
        "message_id": "<CADCv5WhGgVj0LCjvkTKda_mNxQ6WqQdGmP1afV=sj2v=WSBwow@mail.gmail.com>"
      },
      {
        "date": "23-Apr-2025",
        "time": "20:27:50",
        "subject": "Directorate Shifted to Chankya Complex",
        "content": "Dear All, This is to inform you that all the offices of the Deans, Registrar, DD and D have moved back to Chanamkya\u00a0Complex. -- With warm regards..... Prof. Avinash Kumar Agarwal, FTWAS, FAAAS, FCI, FSAE, FASME, FRSC, FNAE, FNASc, FISEES Director, IIT Jodhpur & Sir J C Bose National Fellow Tel: +91 291 2801011 (Off) Wikipedia: tinyurl.com/bdhe89ew | Scopus: https://tinyurl.com/mwccdcc4 | Google Scholar: https://tinyurl.com/mtbyv7w4 | FUEL: https://tinyurl.com/bdzn4r28 | Orcid: https://tinyurl.com/537m3tad ------------------------------ ------------------------------ ---------------- \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of The World Academy of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Combustion Institute, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Association for the Advancement of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Society of Mechanical Engineers \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Society of Automotive Engineers International, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of World Society for Sustainable Energy Technologies, UK \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Royal Society of Chemistry, UK \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of National Academy of Sciences India \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Indian National Academy of Engineering \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of International Society of Energy, Environment, and Sustainability ------------------------------ ------------------------------ ------------- \u2022\u00a0 \u00a0 \u00a0 \u00a0Shanti Swarup Bhatnagar Award-2016 \u2022\u00a0 \u00a0 \u00a0 \u00a0Editor of FUEL \u2022\u00a0 \u00a0 \u00a0 \u00a0Associate Editor of ASME Open Journal of Engineering \u2022\u00a0 \u00a0 \u00a0 \u00a0Associate Editor of SAE International Journal of Engines ------------------------------ ------------------------------ --------------",
        "message_id": "<CADCv5WhB=aoNjykLwPj9wY-ZNTCxnBp5EFsPriDs+mpH9Fi-WA@mail.gmail.com>"
      },
      {
        "date": "01-May-2025",
        "time": "12:20:20",
        "subject": "Thank You",
        "content": "Dear\r\nColleagues I want to thank all the stakeholders for their kind cooperation,\r\nenabling me to complete one year as Director of IIT Jodhpur. I joined the\r\nInstitute on 1 st May 2024. I realised this Institute has great potential and can break into the top echelons of\r\nranking among engineering institutions in the country and the world. However,\r\nto achieve this, all of us must work as a unified team. From my\r\nside, I assure you that I will make all possible efforts to ensure that fair\r\nand transparent governance processes are in place and we, as a team, make all\r\nthe efforts in the right direction. In the last\r\nyear, extra-mural research grants to IIT jodhpur have doubled, and project endorsements\r\nand publications have significantly increased; however, there are miles to go. I hope we\r\nall continue to work relentlessly to pursue excellence in our activities, be\r\nloyal to the Institute, and do all our duties with dedication, sincerity and\r\nhonesty. This Institute cannot have any room for corruption, nepotism and\r\nregionalism. As IIT Jodhpur stakeholders, we must commit to having excellent\r\nconduct and setting an example for others to follow. Wishing you\r\nall the very best Affectionately\r\nyours Avinash Kumar Agarwal, FTWAS, FAAAS, FCI, FSAE, FASME, FRSC, FNAE, FNASc, FISEES Director, IIT Jodhpur & Sir J C Bose National Fellow Tel: +91 291 2801011 (Off) Wikipedia: tinyurl.com/bdhe89ew | Scopus: https://tinyurl.com/mwccdcc4 | Google Scholar: https://tinyurl.com/mtbyv7w4 | FUEL: https://tinyurl.com/bdzn4r28 | Orcid: https://tinyurl.com/537m3tad ------------------------------ ------------------------------ ---------------- \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of The World Academy of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Combustion Institute, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Association for the Advancement of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Society of Mechanical Engineers \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Society of Automotive Engineers International, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of World Society for Sustainable Energy Technol",
        "message_id": "<CADCv5Wjxn7Pj2XXBf4n2S0PyJQapU1aOOvfHfjjtr4xfSWbeKw@mail.gmail.com>"
      },
      {
        "date": "07-May-2025",
        "time": "20:56:02",
        "subject": "Instruction for Next few days: Urgent attention",
        "content": "Dear Faculty Members, Staff Members, Students and Other Constituents of our campus community You are aware that we are passing through a tough time, as far as national security is concerned. Today, we have done a drill for the evacuation of the campus community in the event of an air strike by our nemesis. We will have less than 5 minutes for blackout and evacuation into the tunnels. The next few days and nights are very critical, and we may be one of the soft targets. Hence, adequate care and precaution are important to all of us. Please note: 1. There will be no more drills. In case you hear a siren (Oscillating), it means an impending attack, and we will have less than 5 minutes to get into the tunnel hideouts.\u00a0The flat siren will mean that the danger has passed, and now it is safe to venture out. Enemy fighters\u00a0and missiles will reach us in 5-10 minutes after crossing the border, and that's all we would have to ensure our safety. 2. Today evening, we will do black out\u00a0drill between 10-10.15 PM. There will be no siren; hence, all of you are to follow the procedure voluntarily on your own. The power will be cut (If it is resumed by JVVNL). Every possible light source must be turned off. The lights can be put on again after 15 minutes, at 10.15 PM tonight. 3. In the event of an impending attack, the lights will be cut off centrally after the siren goes off, in the next 2-3 minutes.\u00a0That's all the time, you will have to come down the roads and move into the tunnels. It is\u00a0advised to carry your own water bottle in such an event. 4. There should be no live streaming, photographs posted on social media for these or sharing of this email on any platform. This will put all of us in danger. On each tunnel entry point, we will post security guards to\u00a0guide\u00a0you safely. Please ensure that you do not panic and move in a\u00a0disciplined manner into the tunnels,\u00a0when and if required. If you have already posted the photos and videos of tunnels on your social media accounts, please d",
        "message_id": "<CADCv5WiK10w-aj0Vn2vq+bT1qViromAsfpwd+DPWGeYx2zspXA@mail.gmail.com>"
      },
      {
        "date": "07-May-2025",
        "time": "22:28:02",
        "subject": "Re: Instruction for Next few days: Urgent attention",
        "content": "Dear All, Thanks. Black out drill was an outstanding success and we figured out some lapses, which have been fixed. Be alert and all of us know the steps, in case required, to keep us safe. Be calm and hope that our forces will keep our nemesis at bay and we are not required to be in the hideout. In any case, now we all know the next steps and hopefully we will sleep peacefully. Best regards Avinash Kumar Agarwal On Wed, 7 May, 2025, 20:56 Director, IIT Jodhpur, < [email protected] > wrote: Dear Faculty Members, Staff Members, Students and Other Constituents of our campus community You are aware that we are passing through a tough time, as far as national security is concerned. Today, we have done a drill for the evacuation of the campus community in the event of an air strike by our nemesis. We will have less than 5 minutes for blackout and evacuation into the tunnels. The next few days and nights are very critical, and we may be one of the soft targets. Hence, adequate care and precaution are important to all of us. Please note: 1. There will be no more drills. In case you hear a siren (Oscillating), it means an impending attack, and we will have less than 5 minutes to get into the tunnel hideouts.\u00a0The flat siren will mean that the danger has passed, and now it is safe to venture out. Enemy fighters\u00a0and missiles will reach us in 5-10 minutes after crossing the border, and that's all we would have to ensure our safety. 2. Today evening, we will do black out\u00a0drill between 10-10.15 PM. There will be no siren; hence, all of you are to follow the procedure voluntarily on your own. The power will be cut (If it is resumed by JVVNL). Every possible light source must be turned off. The lights can be put on again after 15 minutes, at 10.15 PM tonight. 3. In the event of an impending attack, the lights will be cut off centrally after the siren goes off, in the next 2-3 minutes.\u00a0That's all the time, you will have to come down the roads and move into the tunnels. It is\u00a0advis",
        "message_id": "<CADCv5Wh8x5L+Z=5bNYG_f=tbX=Lo0HH=1cT3yTw9Huv4Kr+iWQ@mail.gmail.com>"
      },
      {
        "date": "08-May-2025",
        "time": "11:06:17",
        "subject": "Re: [faculty] Re: Instruction for Next few days: Urgent attention",
        "content": "Dear All, There was a complete blackout in the entire city last night from 12-4 AM as all feeders were shut down by the district administration, and there was no power supply anywhere in the city. This might have led to some inconveniences\u00a0for all of us in these difficult times. These directions of complete blackout\u00a0are likely to be given again by the district administration over the next few days, depending on threat perception and intel inputs. I am trying to get our electricity supplies uninterrupted by discussing with the district admin so that the campus community stays indoors during these long and declared blackout periods. It is likely that we will keep all our street lights and public lights off starting the evenings over the next few days. The campus community is advised to ensure that they have all lights off during the declared blackout periods,\u00a0without any defaults. Any defaults may lead to our staying without electricity,\u00a0at par with the rest of the city. In addition, in the event of a siren going off, everyone needs to rush to the hideouts, as per our previous drill. Siren will indicate an upcoming aerial raid. I would also like to reiterate that there is no specific additional threat to the IITJ community. The threat to us is similar to that of any other part of the country, and there is no specific need for any panic or concern. We are all in this situation, as a united Bharat, and we must all face it bravely. There is no need for any anxiety or nervousness by seeing our emails about the safety protocols. These are just to ensure that in the event of any adverse action by our nemesis, our campus community stays safe, and all these measures taken by IITJ and drills were part of precautionary measures taken on the directions of the district administration. You may please contact Prof. Bhabani Satapathi, Prof. S R Vadera or Col Virendra Singh Rathore in case of any genuine concerns. Best wishes Avinash Kumar Agarwal On Wed, May 7, 2025 at 11:32\u202fPM Avin",
        "message_id": "<CADCv5WixMeCadxAfjoOoNrBR2WotkVyw-678FsLfEODX5KRisA@mail.gmail.com>"
      },
      {
        "date": "08-May-2025",
        "time": "21:42:20",
        "subject": "Re: Important Notice: Citywide Blackout and Campus Power Supply Instructions",
        "content": "Evacuation to tunnels immediately On Thu, 8 May, 2025, 21:32 Deputy Director IIT Jodhpur, < [email protected] > wrote: Dear All, As per instructions from the District Administration, there will be a blackout and no power supply tonight across the entire city of Jodhpur. However, following discussions between our Director and the city administration, a special provision has been made to allow limited power supply within our campus\u2014only to Type C, Type B, and Hostel areas\u2014provided that the campus community strictly adheres to the following: \u2022\tRemain indoors throughout the blackout period. \u2022\tKeep all lights switched off; only fans may be used. \u2022\tNo lights should be visible from outside under any circumstance. Please note, no power supply will be provided to any other areas of the campus apart from the three mentioned above. Your cooperation is essential in ensuring compliance with this directive and maintaining safety for all. Warm regards, Prof. Bhabani Kumar Satapathy",
        "message_id": "<CADCv5WiFEi5Qbim6PqHk7E-fu=qv4XP5ZDbg3uQgBAHou3Tzmw@mail.gmail.com>"
      },
      {
        "date": "10-May-2025",
        "time": "18:33:45",
        "subject": "Updates",
        "content": "Dear All, We should be aware that the threat is now over, and we can resume our \"Business as usual\". Those who are planning to go should not, and those who have already left the campus can make their plans to return, as per their convenience. Congratulations to all for showing an absolute resolve to tackle this national threat and showing that we are Bharat of the 21st century, a \"Naya Bharat\". This also calls for all IITJ constituents to work actively towards the national defence and offence capabilities. Jai Hind and Jai Bharat. Best regards Avinash Kumar AGarwal",
        "message_id": "<CADCv5Wg8YogC8kG2DH7w=GmpZiKb80_nXMhBFRJjNTUZBVGQVQ@mail.gmail.com>"
      },
      {
        "date": "24-May-2025",
        "time": "21:46:02",
        "subject": "Great News",
        "content": "Dear All, I am delighted to share with you some fantastic news. Our Jaipur campus has come one step closer to realisation with the Government of Rajasthan agreeing \"in principle\" to allocate us land and buildings. Now we have secured a letter of intent from the GoR, which now needs to be taken up with the Ministry of Education and the Ministry of Finance, Government of India. Once these approvals are secured, we will realise our dream of having a Jaipur campus, apart from our main campus in Jodhpur. We are also beginning to work on our small footprint\u00a0campus in Jaisalmer to\u00a0complete our dream of IITJ3. This is a big feat for us as an institute to get the GoR to agree to our proposal. Hopefully, more good things will follow. -- With warm regards..... Prof. Avinash Kumar Agarwal, FTWAS, FAAAS, FCI, FSAE, FASME, FRSC, FNAE, FNASc, FISEES Director, IIT Jodhpur & Sir J C Bose National Fellow Tel: +91 291 2801011 (Off) Wikipedia: tinyurl.com/bdhe89ew | Scopus: https://tinyurl.com/mwccdcc4 | Google Scholar: https://tinyurl.com/mtbyv7w4 | FUEL: https://tinyurl.com/bdzn4r28 | Orcid: https://tinyurl.com/537m3tad ------------------------------ ------------------------------ ---------------- \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of The World Academy of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Combustion Institute, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Association for the Advancement of Science \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of American Society of Mechanical Engineers \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Society of Automotive Engineers International, USA \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of World Society for Sustainable Energy Technologies, UK \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Royal Society of Chemistry, UK \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of National Academy of Sciences India \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of Indian National Academy of Engineering \u2022\u00a0 \u00a0 \u00a0 \u00a0Fellow of International Society of Energy, Environment, and Sustainability ------------------------------ ------------------------------ ------------- \u2022\u00a0 \u00a0 \u00a0 \u00a0Shanti Swarup Bhatnagar Award-2016 \u2022\u00a0 \u00a0 \u00a0 \u00a0Editor of FUEL \u2022\u00a0 \u00a0 \u00a0 \u00a0Associate Editor of ASME Open Journal of Enginee
|
107 |
-
"message_id": "<CADCv5Wik-=UY6XFVqbtHBPNYXGe3gWkYE82qV286AySDp1qL_w@mail.gmail.com>"
|
108 |
-
},
|
109 |
-
{
|
110 |
-
"date": "07-Jun-2025",
|
111 |
-
"time": "12:44:03",
|
112 |
-
"subject": "Greetings on the occasion of Eid al-Adha!",
|
113 |
-
"content": "Dear All, On the joyous occasion of Eid al-Adha, I extend my warmest greetings to all members of IITJ. This festival, rooted in the values of personal sacrifices, compassion, empathy and unity, inspires us to strengthen our bonds and work together for the greater good. At IIT Jodhpur, we are committed to nurturing an environment of compassion, empathy, honesty, collaboration, innovation, and integrity. As we celebrate this auspicious day, let us reaffirm our dedication to positive growth, unite in our pursuit of excellence, and resolve to uphold transparency. May this festival bring peace, prosperity, and harmony to our vibrant campus community and its constituents. Best wishes, Affectionately Yours Avinash Kumar Agarwal Director",
|
114 |
-
"message_id": "<CADCv5Wj4J-FCNitA2r_m9uT5pFZNz-OQFXTwQM1em+ki69=9jQ@mail.gmail.com>"
|
115 |
-
}
|
116 |
-
],
|
117 |
-
"last_scraped": "07-Jun-2025"
|
118 |
-
},
|
119 |
-
"[email protected]": {
|
120 |
-
"emails": [],
|
121 |
-
"last_scraped": "07-Jun-2025"
|
122 |
-
},
|
123 |
-
"[email protected]": {
|
124 |
-
"emails": [
|
125 |
-
{
|
126 |
-
"date": "07-Jun-2025",
|
127 |
-
"time": "16:42:51",
|
128 |
-
"subject": "testing",
|
129 |
-
"content": "hi bro",
|
130 |
-
"message_id": "<CAPziNCaSuVqpqNNfsRjhVbx22jN_vos3EGK_Odt-8WiD0HRKKQ@mail.gmail.com>"
|
131 |
-
}
|
132 |
-
],
|
133 |
-
"last_scraped": "07-Jun-2025"
|
134 |
-
}
|
135 |
-
}
|
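The fragment above is the tail of the deleted `server/email_db.json` cache: one object per sender address, each holding the scraped `emails` list and a `last_scraped` date in DD-MMM-YYYY format. A minimal sketch of reading it back (the file name comes from the scraper's `EMAIL_DB_FILE` constant):

```python
import json

# Each top-level key is a sender address; each value holds the cached
# messages plus the DD-MMM-YYYY date of the last scrape.
with open("email_db.json") as f:
    db = json.load(f)

for sender, record in db.items():
    print(sender, len(record["emails"]), record["last_scraped"])
```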
server/email_scraper.py
DELETED
@@ -1,267 +0,0 @@
#!/usr/bin/env python3
"""
Enhanced Email Scraper with Intelligent Caching
"""

import os
import imaplib
import json
from email import message_from_bytes
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
from dotenv import load_dotenv
from zoneinfo import ZoneInfo
from email.utils import parsedate_to_datetime
from typing import List, Dict

load_dotenv()

# Email credentials
APP_PASSWORD = os.getenv("APP_PASSWORD")
EMAIL_ID = os.getenv("EMAIL_ID")
EMAIL_DB_FILE = "email_db.json"

def _imap_connect():
    """Connect to Gmail IMAP server"""
    try:
        mail = imaplib.IMAP4_SSL("imap.gmail.com")
        mail.login(EMAIL_ID, APP_PASSWORD)
        mail.select('"[Gmail]/All Mail"')
        return mail
    except Exception as e:
        print(f"IMAP connection failed: {e}")
        raise

def _email_to_clean_text(msg):
    """Extract clean text from email message"""
    # Try HTML first
    html_content = None
    text_content = None

    if msg.is_multipart():
        for part in msg.walk():
            content_type = part.get_content_type()
            if content_type == "text/html":
                try:
                    html_content = part.get_payload(decode=True).decode(errors="ignore")
                except Exception:
                    continue
            elif content_type == "text/plain":
                try:
                    text_content = part.get_payload(decode=True).decode(errors="ignore")
                except Exception:
                    continue
    else:
        # Non-multipart message
        content_type = msg.get_content_type()
        try:
            content = msg.get_payload(decode=True).decode(errors="ignore")
            if content_type == "text/html":
                html_content = content
            else:
                text_content = content
        except Exception:
            pass

    # Clean HTML content
    if html_content:
        soup = BeautifulSoup(html_content, "html.parser")
        # Remove script and style elements
        for script in soup(["script", "style"]):
            script.decompose()
        return soup.get_text(separator=' ', strip=True)
    elif text_content:
        return text_content.strip()
    else:
        return ""

def _load_email_db() -> Dict:
    """Load email database from file"""
    if not os.path.exists(EMAIL_DB_FILE):
        return {}
    try:
        with open(EMAIL_DB_FILE, "r") as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        print(f"Warning: Could not load {EMAIL_DB_FILE}, starting with empty database")
        return {}

def _save_email_db(db: Dict):
    """Save email database to file"""
    try:
        with open(EMAIL_DB_FILE, "w") as f:
            json.dump(db, f, indent=2)
    except IOError as e:
        print(f"Error saving database: {e}")
        raise

def _date_to_imap_format(date_str: str) -> str:
    """Convert DD-MMM-YYYY to IMAP date format"""
    try:
        dt = datetime.strptime(date_str, "%d-%b-%Y")
        return dt.strftime("%d-%b-%Y")
    except ValueError:
        raise ValueError(f"Invalid date format: {date_str}. Expected DD-MMM-YYYY")

def _is_date_in_range(email_date: str, start_date: str, end_date: str) -> bool:
    """Check if email date is within the specified range"""
    try:
        email_dt = datetime.strptime(email_date, "%d-%b-%Y")
        start_dt = datetime.strptime(start_date, "%d-%b-%Y")
        end_dt = datetime.strptime(end_date, "%d-%b-%Y")
        return start_dt <= email_dt <= end_dt
    except ValueError:
        return False

def scrape_emails_from_sender(sender_email: str, start_date: str, end_date: str) -> List[Dict]:
    """
    Scrape emails from specific sender within date range
    Uses intelligent caching to avoid re-scraping
    """
    print(f"Scraping emails from {sender_email} between {start_date} and {end_date}")

    # Load existing database
    db = _load_email_db()
    sender_email = sender_email.lower().strip()

    # Check if we have cached emails for this sender
    if sender_email in db:
        cached_emails = db[sender_email].get("emails", [])

        # Filter cached emails by date range
        filtered_emails = [
            email for email in cached_emails
            if _is_date_in_range(email["date"], start_date, end_date)
        ]

        # Check if we need to scrape more recent emails
        last_scraped = db[sender_email].get("last_scraped", "01-Jan-2020")
        today = datetime.today().strftime("%d-%b-%Y")

        if last_scraped == today and filtered_emails:
            print(f"Using cached emails (last scraped: {last_scraped})")
            return filtered_emails

    # Need to scrape emails
    try:
        mail = _imap_connect()

        # Prepare IMAP search criteria
        start_imap = _date_to_imap_format(start_date)
        # Add one day to end_date for BEFORE criteria (IMAP BEFORE is exclusive)
        end_dt = datetime.strptime(end_date, "%d-%b-%Y") + timedelta(days=1)
        end_imap = end_dt.strftime("%d-%b-%Y")

        search_criteria = f'(FROM "{sender_email}") SINCE "{start_imap}" BEFORE "{end_imap}"'
        print(f"IMAP search: {search_criteria}")

        # Search for emails
        status, data = mail.search(None, search_criteria)
        if status != 'OK':
            raise Exception(f"IMAP search failed: {status}")

        email_ids = data[0].split()
        print(f"Found {len(email_ids)} emails")

        scraped_emails = []

        # Process each email
        for i, email_id in enumerate(email_ids):
            try:
                print(f"Processing email {i+1}/{len(email_ids)}")

                # Fetch email
                status, msg_data = mail.fetch(email_id, "(RFC822)")
                if status != 'OK':
                    continue

                # Parse email
                msg = message_from_bytes(msg_data[0][1])

                # Extract information
                subject = msg.get("Subject", "No Subject")
                content = _email_to_clean_text(msg)

                # Parse date
                date_header = msg.get("Date", "")
                if date_header:
                    try:
                        dt_obj = parsedate_to_datetime(date_header)
                        # Convert to IST
                        ist_dt = dt_obj.astimezone(ZoneInfo("Asia/Kolkata"))
                        email_date = ist_dt.strftime("%d-%b-%Y")
                        email_time = ist_dt.strftime("%H:%M:%S")
                    except Exception:
                        email_date = datetime.today().strftime("%d-%b-%Y")
                        email_time = "00:00:00"
                else:
                    email_date = datetime.today().strftime("%d-%b-%Y")
                    email_time = "00:00:00"

                # Get message ID for deduplication
                message_id = msg.get("Message-ID", f"missing-{email_id.decode()}")

                scraped_emails.append({
                    "date": email_date,
                    "time": email_time,
                    "subject": subject,
                    "content": content[:2000],  # Limit content length
                    "message_id": message_id
                })

            except Exception as e:
                print(f"Error processing email {email_id}: {e}")
                continue

        mail.logout()

        # Update database
        if sender_email not in db:
            db[sender_email] = {"emails": [], "last_scraped": ""}

        # Merge with existing emails (avoid duplicates)
        existing_emails = db[sender_email].get("emails", [])
        existing_ids = {email.get("message_id") for email in existing_emails}

        new_emails = [
            email for email in scraped_emails
            if email["message_id"] not in existing_ids
        ]

        # Update database
        db[sender_email]["emails"] = existing_emails + new_emails
        db[sender_email]["last_scraped"] = datetime.today().strftime("%d-%b-%Y")

        # Save database
        _save_email_db(db)

        # Return filtered results
        all_emails = db[sender_email]["emails"]
        filtered_emails = [
            email for email in all_emails
            if _is_date_in_range(email["date"], start_date, end_date)
        ]

        print(f"Scraped {len(new_emails)} new emails, returning {len(filtered_emails)} in date range")
        return filtered_emails

    except Exception as e:
        print(f"Email scraping failed: {e}")
        raise

# Test the scraper
if __name__ == "__main__":
    # Test scraping
    try:
        emails = scrape_emails_from_sender(
            "[email protected]",
            "01-Jun-2025",
            "07-Jun-2025"
        )

        print(f"\nFound {len(emails)} emails:")
        for email in emails[:3]:  # Show first 3
            print(f"- {email['date']} {email['time']}: {email['subject']}")

    except Exception as e:
        print(f"Test failed: {e}")
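One detail worth noting in the deleted scraper: IMAP's `BEFORE` criterion is exclusive, which is why the code shifts `end_date` forward by one day before building the search string. A quick sketch of that boundary arithmetic, using the same date format as the module:

```python
from datetime import datetime, timedelta

# IMAP BEFORE is exclusive, so the scraper searches BEFORE end_date + 1 day
# to keep the requested end date inside the window.
end_date = "07-Jun-2025"
end_imap = (datetime.strptime(end_date, "%d-%b-%Y") + timedelta(days=1)).strftime("%d-%b-%Y")
print(end_imap)  # 08-Jun-2025; BEFORE "08-Jun-2025" still includes mail from 07-Jun-2025
```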
server/main.py
DELETED
@@ -1,29 +0,0 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from routes import router

app = FastAPI(
    title="Email Query System",
    description="Natural language email querying with intent classification",
    version="1.0.0"
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routes
app.include_router(router, prefix="/api/v1")

@app.get("/")
def root():
    return {
        "message": "Email Query System API",
        "docs": "/docs",
        "health": "/api/v1/health"
    }
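No run command survives in this diff; a minimal sketch for serving the app locally, assuming `uvicorn` is installed (the usual FastAPI server), would be:

```python
# Hypothetical launcher; "main:app" refers to the app object defined above.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
```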
server/name_mapping.json
DELETED
@@ -1,3 +0,0 @@
{
  "dev": "[email protected]"
}
server/query_parser.py
DELETED
@@ -1,189 +0,0 @@
#!/usr/bin/env python3
"""
Query Parser with Intent Classification and Name-to-Email Resolution
"""

import json
import os
from datetime import datetime, timedelta
from openai import OpenAI
from typing import Dict, Optional, Tuple
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# Initialize OpenAI client
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

# File paths
NAME_MAPPING_FILE = "name_mapping.json"
EMAIL_DB_FILE = "email_db.json"

def _llm(messages, model="gpt-4o-mini", temperature=0):
    """Helper function to call OpenAI API"""
    rsp = client.chat.completions.create(
        model=model,
        temperature=temperature,
        messages=messages,
    )
    return rsp.choices[0].message.content.strip()

def _load_name_mapping() -> Dict[str, str]:
    """Load name to email mapping from JSON file"""
    if not os.path.exists(NAME_MAPPING_FILE):
        return {}
    try:
        with open(NAME_MAPPING_FILE, "r") as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        return {}

def _save_name_mapping(mapping: Dict[str, str]):
    """Save name to email mapping to JSON file"""
    with open(NAME_MAPPING_FILE, "w") as f:
        json.dump(mapping, f, indent=2)

def _load_email_db() -> Dict:
    """Load email database"""
    if not os.path.exists(EMAIL_DB_FILE):
        return {}
    try:
        with open(EMAIL_DB_FILE, "r") as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        return {}

def _save_email_db(db: Dict):
    """Save email database"""
    with open(EMAIL_DB_FILE, "w") as f:
        json.dump(db, f, indent=2)

def extract_query_info(query: str) -> Dict:
    """
    Extract intent and date range from user query using LLM
    """
    today_str = datetime.today().strftime("%d-%b-%Y")

    system_prompt = f"""
You are an email query parser. Today is {today_str}.

Given a user query, extract:
1. sender_intent: The person/entity they want emails from (could be name or email)
2. start_date and end_date: Date range in DD-MMM-YYYY format

For relative dates:
- "last week" = 7 days ago to today
- "yesterday" = yesterday only
- "last month" = 30 days ago to today
- "last 3 days" = 3 days ago to today

Examples:
- "emails from dev agarwal last week" → sender_intent: "dev agarwal"
- "show amazon emails from last month" → sender_intent: "amazon"
- "emails from [email protected] yesterday" → sender_intent: "[email protected]"

Return ONLY valid JSON:
{{
  "sender_intent": "extracted name or email",
  "start_date": "DD-MMM-YYYY",
  "end_date": "DD-MMM-YYYY"
}}
"""

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": query}
    ]

    result = _llm(messages)
    return json.loads(result)

def resolve_sender_email(sender_intent: str) -> Tuple[Optional[str], bool]:
    """
    Resolve sender intent to actual email address
    Returns: (email_address, needs_user_input)
    """
    # Check if it's already an email address
    if "@" in sender_intent:
        return sender_intent.lower(), False

    # Load name mapping
    name_mapping = _load_name_mapping()

    # Normalize the intent (lowercase for comparison)
    normalized_intent = sender_intent.lower().strip()

    # Check direct match
    if normalized_intent in name_mapping:
        return name_mapping[normalized_intent], False

    # Check partial matches (fuzzy matching)
    for name, email in name_mapping.items():
        if normalized_intent in name.lower() or name.lower() in normalized_intent:
            return email, False

    # No match found
    return None, True

def store_name_email_mapping(name: str, email: str):
    """Store new name to email mapping"""
    name_mapping = _load_name_mapping()
    name_mapping[name.lower().strip()] = email.lower().strip()
    _save_name_mapping(name_mapping)

def parse_email_query(query: str) -> Dict:
    """
    Main function to parse email query
    Returns structured response with next steps
    """
    try:
        # Step 1: Extract intent and dates
        query_info = extract_query_info(query)
        sender_intent = query_info["sender_intent"]
        start_date = query_info["start_date"]
        end_date = query_info["end_date"]

        # Step 2: Resolve sender email
        email_address, needs_input = resolve_sender_email(sender_intent)

        if needs_input:
            # Need to ask user for email address
            return {
                "status": "need_email_input",
                "sender_intent": sender_intent,
                "start_date": start_date,
                "end_date": end_date,
                "message": f"I don't have an email address for '{sender_intent}'. Please provide the email address."
            }
        else:
            # Ready to proceed with email scraping
            return {
                "status": "ready_to_scrape",
                "sender_intent": sender_intent,
                "resolved_email": email_address,
                "start_date": start_date,
                "end_date": end_date,
                "message": f"Found email: {email_address} for '{sender_intent}'"
            }

    except Exception as e:
        return {
            "status": "error",
            "error": str(e),
            "message": "Failed to parse query"
        }

# Test the parser
if __name__ == "__main__":
    # Test cases
    test_queries = [
        "Show me emails from dev agarwal last week",
        "emails from amazon in the last month",
        "get [email protected] emails yesterday",
        "emails from new person last 3 days"
    ]

    for query in test_queries:
        print(f"\nQuery: {query}")
        result = parse_email_query(query)
        print(f"Result: {json.dumps(result, indent=2)}")
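The LLM step above needs an `OPENAI_API_KEY`, but the name-resolution logic is deterministic and can be exercised on its own. A sketch, using a hypothetical "dev" mapping and address for illustration:

```python
from query_parser import resolve_sender_email, store_name_email_mapping

# Hypothetical mapping; any address works for illustration.
store_name_email_mapping("dev", "dev@example.com")

print(resolve_sender_email("dev"))          # ('dev@example.com', False) - direct hit
print(resolve_sender_email("dev agarwal"))  # ('dev@example.com', False) - fuzzy: "dev" is a substring
print(resolve_sender_email("someone new"))  # (None, True) - caller must ask for the address
```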
server/routes.py
DELETED
@@ -1,206 +0,0 @@
#!/usr/bin/env python3
"""
FastAPI Routes for Email Query System
"""

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, EmailStr
from typing import List, Dict, Optional
import json

# Import our modules
from query_parser import parse_email_query, store_name_email_mapping
from email_scraper import scrape_emails_from_sender

router = APIRouter()

# Pydantic models
class NaturalQuery(BaseModel):
    query: str

class EmailMappingInput(BaseModel):
    name: str
    email: EmailStr

class EmailResponse(BaseModel):
    date: str
    time: str
    subject: str
    content: str
    message_id: str

class QueryParseResponse(BaseModel):
    status: str
    sender_intent: Optional[str] = None
    resolved_email: Optional[str] = None
    start_date: Optional[str] = None
    end_date: Optional[str] = None
    message: str
    error: Optional[str] = None

class EmailsResponse(BaseModel):
    status: str
    sender_intent: str
    resolved_email: str
    start_date: str
    end_date: str
    total_emails: int
    emails: List[EmailResponse]
    message: str

@router.post("/parse_query", response_model=QueryParseResponse)
def parse_email_query_endpoint(input_data: NaturalQuery):
    """
    Parse natural language query to extract intent and dates
    """
    try:
        result = parse_email_query(input_data.query)
        return QueryParseResponse(**result)
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Query parsing failed: {str(e)}")

@router.post("/add_email_mapping")
def add_email_mapping(mapping: EmailMappingInput):
    """
    Add new name to email mapping
    """
    try:
        store_name_email_mapping(mapping.name, mapping.email)
        return {
            "status": "success",
            "message": f"Mapping added: '{mapping.name}' → '{mapping.email}'"
        }
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Failed to add mapping: {str(e)}")

@router.post("/get_emails", response_model=EmailsResponse)
def get_emails_from_query(input_data: NaturalQuery):
    """
    Complete flow: Parse query → Resolve email → Scrape emails
    """
    try:
        # Step 1: Parse the query
        parsed_result = parse_email_query(input_data.query)

        if parsed_result["status"] == "need_email_input":
            raise HTTPException(
                status_code=400,
                detail={
                    "type": "need_email_input",
                    "sender_intent": parsed_result["sender_intent"],
                    "message": parsed_result["message"]
                }
            )
        elif parsed_result["status"] == "error":
            raise HTTPException(status_code=400, detail=parsed_result["message"])

        # Step 2: Scrape emails
        emails = scrape_emails_from_sender(
            parsed_result["resolved_email"],
            parsed_result["start_date"],
            parsed_result["end_date"]
        )

        # Step 3: Format response
        email_responses = [
            EmailResponse(
                date=email["date"],
                time=email["time"],
                subject=email["subject"],
                content=email["content"],
                message_id=email["message_id"]
            )
            for email in emails
        ]

        return EmailsResponse(
            status="success",
            sender_intent=parsed_result["sender_intent"],
            resolved_email=parsed_result["resolved_email"],
            start_date=parsed_result["start_date"],
            end_date=parsed_result["end_date"],
            total_emails=len(emails),
            emails=email_responses,
            message=f"Found {len(emails)} emails from {parsed_result['resolved_email']}"
        )

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Email retrieval failed: {str(e)}")

@router.get("/view_mappings")
def view_name_mappings():
    """
    View all stored name to email mappings
    """
    try:
        from query_parser import _load_name_mapping
        mappings = _load_name_mapping()
        return {
            "status": "success",
            "total_mappings": len(mappings),
            "mappings": mappings
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to load mappings: {str(e)}")

@router.get("/health")
def health_check():
    """
    Health check endpoint
    """
    return {
        "status": "healthy",
        "message": "Email query system is running"
    }

# For testing - manual endpoint to add mapping and then query
@router.post("/complete_flow")
def complete_email_flow(input_data: dict):
    """
    Test endpoint for complete flow with optional mapping
    Expected input:
    {
      "query": "emails from john last week",
      "mapping": {"name": "john", "email": "[email protected]"}  # optional
    }
    """
    try:
        query = input_data.get("query")
        mapping = input_data.get("mapping")

        if not query:
            raise HTTPException(status_code=400, detail="Query is required")

        # Add mapping if provided
        if mapping:
            store_name_email_mapping(mapping["name"], mapping["email"])

        # Parse and get emails
        parsed_result = parse_email_query(query)

        if parsed_result["status"] == "need_email_input":
            return {
                "status": "need_mapping",
                "message": parsed_result["message"],
                "sender_intent": parsed_result["sender_intent"]
            }

        # Get emails
        emails = scrape_emails_from_sender(
            parsed_result["resolved_email"],
            parsed_result["start_date"],
            parsed_result["end_date"]
        )

        return {
            "status": "success",
            "query": query,
            "parsed": parsed_result,
            "total_emails": len(emails),
            "emails": emails[:5]  # Return first 5 emails
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
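For reference, a sketch of calling the combined endpoint once the server is up, assuming the `requests` package is available and the API runs on localhost:8000 with the `/api/v1` prefix from `main.py`:

```python
import requests

# NaturalQuery body as defined above; the server parses the query,
# resolves the sender, scrapes, and returns the matching emails.
resp = requests.post(
    "http://localhost:8000/api/v1/get_emails",
    json={"query": "emails from dev last week"},
)
data = resp.json()
print(data["total_emails"], data["resolved_email"])
```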