Update app.py
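The diff below adds flush=True to every print() call in app.py and drops a stale comment from save_data_to_json(). The flush matters because Python block-buffers stdout when it is not attached to a terminal, which is the case inside a hosted Space, so unflushed log lines can lag far behind the events they describe. A minimal sketch of the difference (the loop is a hypothetical stand-in for the app's check loop):

    import time

    # Block-buffered stdout (non-TTY): this line may sit in the buffer
    # until it fills or the process exits.
    print("starting check loop")

    for i in range(3):  # hypothetical stand-in for the ping loop
        # flush=True drains the buffer immediately, so each check
        # appears in the log stream as soon as it happens.
        print(f"check #{i} done", flush=True)
        time.sleep(1)

Running the interpreter with python -u, or setting PYTHONUNBUFFERED=1, would unbuffer stdout globally and make the per-call flush unnecessary.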
app.py
CHANGED
@@ -27,7 +27,6 @@ lock = threading.Lock() # Non-reentrant lock
 # --- Helper Functions ---
 def save_data_to_json():
     # This function must be called with 'lock' acquired
-    # REMOVED 'with lock:' from here as callers already acquire it.
     serializable_data = {}
     for url_id, data in monitored_urls_store.items():
         s_data = data.copy()
@@ -37,9 +36,9 @@ def save_data_to_json():
     try:
         with open(DATA_FILE, 'w') as f:
             json.dump(serializable_data, f, indent=2)
-        print(f"Data saved to {DATA_FILE}")
+        print(f"Data saved to {DATA_FILE}", flush=True)
     except IOError as e:
-        print(f"Error saving data to {DATA_FILE}: {e}")
+        print(f"Error saving data to {DATA_FILE}: {e}", flush=True)
 
 def load_data_from_json():
     global monitored_urls_store
@@ -47,7 +46,7 @@ def load_data_from_json():
         try:
             with open(DATA_FILE, 'r') as f:
                 loaded_json_data = json.load(f)
-            print(f"Data loaded from {DATA_FILE}")
+            print(f"Data loaded from {DATA_FILE}", flush=True)
 
             temp_store = {}
             for url_id_key, data_item in loaded_json_data.items():
@@ -65,13 +64,13 @@ def load_data_from_json():
             monitored_urls_store = temp_store
 
         except json.JSONDecodeError:
-            print(f"Warning: Could not decode {DATA_FILE}. Starting with an empty list.")
+            print(f"Warning: Could not decode {DATA_FILE}. Starting with an empty list.", flush=True)
            with lock: monitored_urls_store = {}
         except Exception as e:
-            print(f"Error loading data from {DATA_FILE}: {e}. Starting fresh.")
+            print(f"Error loading data from {DATA_FILE}: {e}. Starting fresh.", flush=True)
            with lock: monitored_urls_store = {}
     else:
-        print(f"{DATA_FILE} not found. Starting with an empty list.")
+        print(f"{DATA_FILE} not found. Starting with an empty list.", flush=True)
        with lock: monitored_urls_store = {}
 
     url_ids_to_start_monitoring = []
@@ -91,10 +90,10 @@ def get_host_ip_address(hostname_str):
         ip_address = socket.gethostbyname(hostname_str)
         return ip_address
     except socket.gaierror:
-        print(f"Could not resolve hostname: {hostname_str}")
+        print(f"Could not resolve hostname: {hostname_str}", flush=True)
         return 'N/A'
     except Exception as e:
-        print(f"Error processing hostname/IP for {hostname_str}: {e}")
+        print(f"Error processing hostname/IP for {hostname_str}: {e}", flush=True)
         return 'N/A'
 
 def prune_url_history(url_data_entry):
@@ -111,7 +110,7 @@ def execute_url_check(url_id_to_check):
         current_url_data = monitored_urls_store[url_id_to_check]
         if current_url_data.get('_stop_event') and current_url_data['_stop_event'].is_set(): return
 
-        print(f"Checking {current_url_data['url']} (ID: {url_id_to_check})...")
+        print(f"Checking {current_url_data['url']} (ID: {url_id_to_check})...", flush=True)
         current_url_data['status'] = 'checking'
         url_config_snapshot = current_url_data.copy()
 
@@ -128,11 +127,11 @@ def execute_url_check(url_id_to_check):
             if 200 <= head_response.status_code < 400:
                 final_check_status = 'ok'
             else:
-                print(f"HEAD for {url_config_snapshot['url']} returned {head_response.status_code}. Trying GET.")
+                print(f"HEAD for {url_config_snapshot['url']} returned {head_response.status_code}. Trying GET.", flush=True)
         except requests.exceptions.Timeout:
-            print(f"HEAD timeout for {url_config_snapshot['url']}. Trying GET...")
+            print(f"HEAD timeout for {url_config_snapshot['url']}. Trying GET...", flush=True)
         except requests.RequestException as e_head:
-            print(f"HEAD failed for {url_config_snapshot['url']}: {e_head}. Trying GET...")
+            print(f"HEAD failed for {url_config_snapshot['url']}: {e_head}. Trying GET...", flush=True)
 
         if final_check_status != 'ok':
             try:
@@ -140,20 +139,20 @@ def execute_url_check(url_id_to_check):
                 if get_response.ok:
                     final_check_status = 'ok'
                 else:
-                    print(f"GET for {url_config_snapshot['url']} status: {get_response.status_code}")
+                    print(f"GET for {url_config_snapshot['url']} status: {get_response.status_code}", flush=True)
                     final_check_status = 'error'
             except requests.exceptions.Timeout:
-                print(f"GET timeout for {url_config_snapshot['url']}")
+                print(f"GET timeout for {url_config_snapshot['url']}", flush=True)
                 final_check_status = 'error'
             except requests.RequestException as e_get:
-                print(f"GET failed for {url_config_snapshot['url']}: {e_get}")
+                print(f"GET failed for {url_config_snapshot['url']}: {e_get}", flush=True)
                 final_check_status = 'error'
 
         if final_check_status == 'ok':
             http_response_time_ms = (time.perf_counter() - check_start_time) * 1000
 
     except Exception as e:
-        print(f"Outer check exception for {url_config_snapshot['url']}: {e}")
+        print(f"Outer check exception for {url_config_snapshot['url']}: {e}", flush=True)
         final_check_status = 'error'
 
     with lock:
@@ -170,7 +169,7 @@ def execute_url_check(url_id_to_check):
         prune_url_history(live_url_data)
 
         save_data_to_json() # Called while lock is held
-        print(f"Finished check for {live_url_data['url']}: {final_check_status}, {http_response_time_ms} ms")
+        print(f"Finished check for {live_url_data['url']}: {final_check_status}, {http_response_time_ms} ms", flush=True)
 
 def pinger_thread_function(url_id_param, stop_event_param):
     while not stop_event_param.is_set():
@@ -178,18 +177,18 @@ def pinger_thread_function(url_id_param, stop_event_param):
         for _ in range(PING_INTERVAL_SECONDS):
             if stop_event_param.is_set(): break
             time.sleep(1)
-    print(f"PingerThread for {url_id_param} stopped.")
+    print(f"PingerThread for {url_id_param} stopped.", flush=True)
 
 def start_url_monitoring_thread(target_url_id):
     with lock:
         if target_url_id not in monitored_urls_store:
-            print(f"Cannot start monitoring: URL ID {target_url_id} not found.")
+            print(f"Cannot start monitoring: URL ID {target_url_id} not found.", flush=True)
             return
 
         url_data_entry = monitored_urls_store[target_url_id]
 
         if "_thread" in url_data_entry and url_data_entry["_thread"].is_alive():
-            print(f"Monitor for URL ID {target_url_id} already running. Attempting to restart.")
+            print(f"Monitor for URL ID {target_url_id} already running. Attempting to restart.", flush=True)
             if "_stop_event" in url_data_entry and url_data_entry["_stop_event"]:
                 url_data_entry["_stop_event"].set()
             url_data_entry["_thread"].join(timeout=3)
@@ -201,14 +200,14 @@ def start_url_monitoring_thread(target_url_id):
         url_data_entry["_stop_event"] = new_stop_event
 
         new_thread.start()
-        print(f"Started/Restarted monitoring for URL ID {target_url_id}: {url_data_entry['url']}")
+        print(f"Started/Restarted monitoring for URL ID {target_url_id}: {url_data_entry['url']}", flush=True)
 
 def stop_url_monitoring_thread(target_url_id):
     # This function must be called with 'lock' acquired
     if target_url_id in monitored_urls_store:
         url_data_entry = monitored_urls_store[target_url_id]
         if "_thread" in url_data_entry and url_data_entry["_thread"].is_alive():
-            print(f"Signaling stop for monitor thread of URL ID {target_url_id}")
+            print(f"Signaling stop for monitor thread of URL ID {target_url_id}", flush=True)
             if "_stop_event" in url_data_entry and url_data_entry["_stop_event"]:
                 url_data_entry["_stop_event"].set()
             url_data_entry.pop("_thread", None)
@@ -302,7 +301,7 @@ def delete_existing_url_for_user(target_url_id):
         response_data = removed_url_entry.copy()
         response_data.pop("_thread", None)
         response_data.pop("_stop_event", None)
-        print(f"Deleted URL ID {target_url_id} for user {user_id}")
+        print(f"Deleted URL ID {target_url_id} for user {user_id}", flush=True)
         return jsonify({"message": "URL removed", "url": response_data}), 200
     else:
         return jsonify({"error": "URL not found"}), 404
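Two implementation notes on the code above, for context.

First, the locking convention touched by the first hunk: lock is a non-reentrant threading.Lock(), so helpers such as save_data_to_json() must be called with the lock already held and must not re-acquire it; a second acquire() from the same thread would deadlock. A minimal sketch of the convention (names simplified from app.py):

    import threading

    lock = threading.Lock()  # non-reentrant: a thread cannot acquire it twice
    store = {}

    def save_data():
        # Convention: the caller already holds 'lock'. Adding 'with lock:'
        # here would deadlock, because a non-reentrant Lock blocks even
        # the thread that currently owns it.
        snapshot = dict(store)
        # ... write snapshot to disk ...

    def update_and_save(key, value):
        with lock:           # the single acquisition point
            store[key] = value
            save_data()      # safe: runs while the lock is held

threading.RLock() would permit re-acquisition, but documenting the convention in a comment, as app.py does, keeps the cheaper Lock.

Second, execute_url_check probes each URL with a cheap HEAD request and falls back to a full GET when HEAD times out, raises, or returns a status outside 200-399 (some servers reject HEAD). A condensed sketch of that flow; the timeout value is an assumption, not taken from app.py:

    import time
    import requests

    def probe(url, timeout=10):  # timeout chosen for illustration
        start = time.perf_counter()
        status = 'error'
        try:
            # Cheap first pass: HEAD transfers headers only.
            r = requests.head(url, timeout=timeout, allow_redirects=True)
            if 200 <= r.status_code < 400:
                status = 'ok'
        except requests.RequestException:
            pass  # fall through to the GET attempt
        if status != 'ok':
            try:
                r = requests.get(url, timeout=timeout)
                status = 'ok' if r.ok else 'error'
            except requests.RequestException:
                status = 'error'
        elapsed_ms = (time.perf_counter() - start) * 1000
        return status, elapsed_ms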