sagarnildass committed
Commit 4013d2c · verified · 1 Parent(s): 508ce05

Upload folder using huggingface_hub

Files changed (1):
  1. app.py +33 -0

app.py CHANGED

@@ -6,10 +6,35 @@ import requests
 from pypdf import PdfReader
 import gradio as gr
 import base64
+import time
+from collections import defaultdict
 
 
 load_dotenv(override=True)
 
+class RateLimiter:
+    def __init__(self, max_requests=5, time_window=5):
+        # max_requests per time_window seconds
+        self.max_requests = max_requests
+        self.time_window = time_window  # in seconds
+        self.request_history = defaultdict(list)
+
+    def is_rate_limited(self, user_id):
+        current_time = time.time()
+        # Remove old requests
+        self.request_history[user_id] = [
+            timestamp for timestamp in self.request_history[user_id]
+            if current_time - timestamp < self.time_window
+        ]
+
+        # Check if user has exceeded the limit
+        if len(self.request_history[user_id]) >= self.max_requests:
+            return True
+
+        # Add current request
+        self.request_history[user_id].append(current_time)
+        return False
+
 def push(text):
     requests.post(
         "https://api.pushover.net/1/messages.json",
 
@@ -100,6 +125,7 @@ class Me:
     def __init__(self):
         self.openai = OpenAI()
         self.name = "Sagarnil Das"
+        self.rate_limiter = RateLimiter(max_requests=5, time_window=60)  # 5 messages per minute
         reader = PdfReader("me/linkedin.pdf")
         self.linkedin = ""
         for page in reader.pages:
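
For reference (not part of the commit), a minimal sketch of how the limiter configured above behaves, assuming the RateLimiter class from app.py is in scope and a single fixed user key:

# Illustrative sketch only, using the values from this diff.
limiter = RateLimiter(max_requests=5, time_window=60)
for i in range(6):
    blocked = limiter.is_rate_limited("default_user")
    print(f"request {i + 1}: {'blocked' if blocked else 'allowed'}")
# Requests 1-5 are allowed; request 6 is blocked until the oldest
# timestamp falls out of the 60-second sliding window.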
 
@@ -137,6 +163,13 @@ in which they provide their email, then give a summary of the conversation so fa
         return system_prompt
 
     def chat(self, message, history):
+        # Apply rate limiting with user's IP as the key
+        # In a production app, you would get the real client IP
+        user_id = "default_user"  # Use request.client.host in a proper web framework
+
+        if self.rate_limiter.is_rate_limited(user_id):
+            return "You're sending messages too quickly. Please wait a moment before sending another message."
+
         messages = [{"role": "system", "content": self.system_prompt()}]
 
         # Check if history is a list of dicts (Gradio "messages" format)
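
As the comments in chat() note, "default_user" is a placeholder key shared by all visitors. A possible follow-up, not part of this commit and assuming the app is served through gr.ChatInterface, would be to let Gradio inject the request so the client IP can key the limiter:

# Hypothetical variant of Me.chat, not in this commit.
# Gradio can pass a gr.Request when the function declares a type-hinted parameter;
# request.client.host then holds the caller's IP (behind a proxy you would need
# the forwarded-for header instead).
def chat(self, message, history, request: gr.Request = None):
    user_id = request.client.host if request is not None and request.client else "default_user"
    if self.rate_limiter.is_rate_limited(user_id):
        return "You're sending messages too quickly. Please wait a moment before sending another message."
    messages = [{"role": "system", "content": self.system_prompt()}]
    ...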