Update app.py
app.py
CHANGED
@@ -1,352 +1,188 @@
 import gradio as gr
-import json
-import requests
-import os
 import pandas as pd
 import folium
 from geopy.geocoders import Nominatim
-from geopy.exc import GeocoderTimedOut, GeocoderServiceError
 import time
-import random
-
-import io

-#
-
-headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}
-
-# Geocoding Service
-class GeocodingService:
-    def __init__(self, user_agent: str = None, timeout: int = 10, rate_limit: float = 1.1):
-        if user_agent is None:
-            user_agent = f"python_geocoding_script_{random.randint(1000, 9999)}"

         self.geolocator = Nominatim(
-            user_agent=user_agent,
-            timeout=timeout
         )
-        self.
-
-
-
-
-
-
-
-
-
-
-
-        # Check cache first
         if location in self.cache:
             return self.cache[location]

-        for attempt in range(max_retries):
-            try:
-                self._rate_limit_wait()
-                location_data = self.geolocator.geocode(location)
-                if location_data:
-                    # Store in cache and return
-                    self.cache[location] = (location_data.latitude, location_data.longitude)
-                    return self.cache[location]
-                # Cache None results too
-                self.cache[location] = None
-                return None
-            except (GeocoderTimedOut, GeocoderServiceError) as e:
-                if attempt == max_retries - 1:
-                    print(f"Failed to geocode '{location}' after {max_retries} attempts: {e}")
-                    self.cache[location] = None
-                    return None
-                time.sleep(2 ** attempt)  # Exponential backoff
-            except Exception as e:
-                print(f"Error geocoding '{location}': {e}")
-                self.cache[location] = None
-                return None
-        return None
-
-    def process_locations(self, locations: str) -> List[Optional[Tuple[float, float]]]:
-        if pd.isna(locations) or not locations:
-            return []
-
         try:
-
-
-
-
-
-
-            # If regex finds nothing, fall back to simple comma splitting
-            if not location_list:
-                location_list = [loc.strip() for loc in locations.split(',') if loc.strip()]
-
-            # For debugging
-            print(f"Parsed '{locations}' into: {location_list}")
-
-            return [self.geocode_location(loc) for loc in location_list]
         except Exception as e:
-            print(f"
-            # Fall back to simple method
-            location_list = [loc.strip() for loc in locations.split(',') if loc.strip()]
-            return [self.geocode_location(loc) for loc in location_list]
-
-# Mapping Functions
-def create_location_map(df: pd.DataFrame,
-                        coordinates_col: str = 'coordinates',
-                        places_col: str = 'places',
-                        title_col: Optional[str] = None) -> folium.Map:
-    # Initialize the map
-    m = folium.Map(location=[0, 0], zoom_start=2)
-    all_coords = []
-
-    # Process each row in the DataFrame
-    for idx, row in df.iterrows():
-        coordinates = row[coordinates_col]
-        places_text = row[places_col] if pd.notna(row[places_col]) else ""
-        title = row[title_col] if title_col and pd.notna(row[title_col]) else None
-
-        # Skip if no coordinates
-        if not coordinates:
-            continue
-
-        # Parse places into a list
-        try:
-            places = [p.strip() for p in places_text.split(',') if p.strip()]
-        except:
-            # Fall back to treating it as a single place if splitting fails
-            places = [places_text] if places_text else []

-
-
-        while len(places) < len(coordinates):
-            places.append(f"Location {len(places) + 1}")
-
-        # Add markers for each coordinate
-        for i, coord in enumerate(coordinates):
-            if coord is not None:  # Skip None coordinates
-                lat, lon = coord
-
-                # Get place name safely
-                if i < len(places):
-                    place_name = places[i]
-                else:
-                    place_name = f"Location {i + 1}"
-
-                # Create popup content
-                popup_content = f"<b>{place_name}</b>"
-                if title:
-                    popup_content += f"<br>{title}"
-
-                # Add marker to the map
-                folium.Marker(
-                    location=[lat, lon],
-                    popup=folium.Popup(popup_content, max_width=300),
-                    tooltip=place_name,
-                ).add_to(m)
-
-                all_coords.append([lat, lon])
-
-    # If we have coordinates, fit the map bounds to include all points
-    if all_coords:
-        m.fit_bounds(all_coords)
-
-    return m

-
-def process_excel(file, column):
-
-    if file is None:
-        return None, "No file uploaded", None

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        temp_map_path = "temp_map.html"
-        map_obj.save(temp_map_path)
-
-        # Save the processed DataFrame to Excel
-        processed_file_path = "processed_data.xlsx"
-        df.to_excel(processed_file_path, index=False)
-
-        # Statistics
-        total_locations = len(df)
-        successful_geocodes = sum(1 for row in df['coordinates'] for coord in row if coord is not None)
-        failed_geocodes = sum(1 for row in df['coordinates'] for coord in row if coord is None)
-
-        stats = f"Total data rows: {total_locations}\n"
-        stats += f"Successfully geocoded locations: {successful_geocodes}\n"
-        stats += f"Failed to geocode locations: {failed_geocodes}"
-
-        return temp_map_path, stats, processed_file_path
-    except Exception as e:
-        import traceback
-        trace = traceback.format_exc()
-        print(f"Error processing file: {e}\n{trace}")
-        return None, f"Error processing file: {str(e)}", None

-
-
     try:
-        #
-
-
-        #
-
-        "
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-

-
-

-        # Handle different response formats
-        try:
-            if isinstance(result, list):
-                if len(result) > 0:
-                    result_text = result[0].get("generated_text", "")
-                else:
-                    return "❌ Empty result list", "{}"
-            else:
-                result_text = str(result)
-
-            # Split at output marker if present
-            if "<|output|>" in result_text:
-                parts = result_text.split("<|output|>")
-                if len(parts) > 1:
-                    json_text = parts[1].strip()
-                else:
-                    json_text = result_text
-            else:
-                json_text = result_text
-
-            # Try to parse as JSON
-            try:
-                extracted = json.loads(json_text)
-                formatted = json.dumps(extracted, indent=2)
-            except json.JSONDecodeError:
-                return "❌ JSON parsing error", json_text
-
-            return "✅ Success", formatted
-        except Exception as inner_e:
-            return f"❌ Error processing result: {str(inner_e)}", "{}"
     except Exception as e:
-
-
-
-
     gr.Markdown("# Historical Data Analysis Tools")

-    with gr.
-
-
-
-        with gr.
-
-
-
-
-
-
-
-
-
-
-
-

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            processed_file = gr.File(label="Processed Data", visible=True, interactive=False)
-
-        def process_and_map(file, column):
-            if file is None:
-                return None, "Please upload an Excel file", None
-
-            try:
-                map_path, stats, processed_path = process_excel(file, column)
-
-                if map_path and processed_path:
-                    with open(map_path, "r") as f:
-                        map_html = f.read()
-
-                    return map_html, stats, processed_path
-                else:
-                    return None, stats, None
-            except Exception as e:
-                import traceback
-                trace = traceback.format_exc()
-                print(f"Error in process_and_map: {e}\n{trace}")
-                return None, f"Error: {str(e)}", None
-
-        process_btn.click(
-            fn=process_and_map,
-            inputs=[excel_file, places_column],
-            outputs=[map_output, stats_output, processed_file]
-        )

 if __name__ == "__main__":
-
 import gradio as gr
 import pandas as pd
 import folium
 from geopy.geocoders import Nominatim
+from geopy.extra.rate_limiter import RateLimiter
+import tempfile
 import time
+from typing import Optional, Tuple
+import warnings

+# Suppress geopy warnings
+warnings.filterwarnings("ignore", category=UserWarning, module="geopy")

+# Configure geocoder
+class Geocoder:
+    def __init__(self):
         self.geolocator = Nominatim(
+            user_agent="historical_data_mapper",
+            timeout=10
         )
+        self.geocode = RateLimiter(
+            self.geolocator.geocode,
+            min_delay_seconds=1,
+            max_retries=2,
+            error_wait_seconds=5
+        )
+        self.cache = {}
+
+    def get_coordinates(self, location: str) -> Optional[Tuple[float, float]]:
+        if not location or pd.isna(location):
+            return None
+
         if location in self.cache:
             return self.cache[location]

         try:
+            location_data = self.geocode(location)
+            if location_data:
+                coords = (location_data.latitude, location_data.longitude)
+                self.cache[location] = coords
+                return coords
         except Exception as e:
+            print(f"Geocoding error for '{location}': {str(e)}")

+        self.cache[location] = None
+        return None

+def create_interactive_map(df: pd.DataFrame, location_column: str) -> str:
+    """Create a folium map with all valid locations"""
+    geocoder = Geocoder()
+    valid_locations = []

+    # Process all unique locations
+    unique_locations = df[location_column].dropna().unique()
+
+    for loc in unique_locations:
+        coords = geocoder.get_coordinates(str(loc))
+        if coords:
+            valid_locations.append((loc, coords))
+
+    if not valid_locations:
+        return "<div style='color:red;text-align:center'>No valid locations found</div>"
+
+    # Create map centered on first location
+    m = folium.Map(
+        location=valid_locations[0][1],
+        zoom_start=5,
+        tiles="CartoDB positron",
+        control_scale=True
+    )
+
+    # Add all markers
+    for loc, coords in valid_locations:
+        folium.Marker(
+            location=coords,
+            popup=folium.Popup(loc, max_width=300),
+            icon=folium.Icon(color="blue", icon="info-sign")
+        ).add_to(m)
+
+    # Fit bounds if multiple locations
+    if len(valid_locations) > 1:
+        m.fit_bounds([coords for _, coords in valid_locations])
+
+    # Return HTML string
+    return m._repr_html_()

+def process_data(file_obj, column_name: str):
+    """Process uploaded file and return results"""
+    start_time = time.time()
+
+    if not file_obj:
+        return None, "Please upload a file", None
+
     try:
+        # Read input file
+        df = pd.read_excel(file_obj.name)
+
+        # Validate column exists
+        if column_name not in df.columns:
+            return None, f"Column '{column_name}' not found in data", None
+
+        # Create map
+        map_html = create_interactive_map(df, column_name)
+
+        # Create processed output
+        with tempfile.NamedTemporaryFile(suffix=".xlsx", delete=False) as tmp_file:
+            df.to_excel(tmp_file.name, index=False)
+            processed_path = tmp_file.name
+
+        # Generate statistics
+        total_rows = len(df)
+        unique_locations = df[column_name].nunique()
+        processing_time = round(time.time() - start_time, 2)
+
+        stats = (
+            f"Total rows processed: {total_rows}\n"
+            f"Unique locations found: {unique_locations}\n"
+            f"Processing time: {processing_time}s"
+        )

+        return (
+            f"<div style='width:100%; height:65vh'>{map_html}</div>",
+            stats,
+            processed_path
+        )

     except Exception as e:
+        error_msg = f"Error processing file: {str(e)}"
+        print(error_msg)
+        return None, error_msg, None
+
+# Gradio Interface
+with gr.Blocks(
+    title="Historical Data Mapper",
+    theme=gr.themes.Soft()
+) as app:
     gr.Markdown("# Historical Data Analysis Tools")

+    with gr.Tab("Location Mapping"):
+        gr.Markdown("### Geocode and visualize location data from Excel files")
+
+        with gr.Row():
+            with gr.Column(scale=1):
+                file_input = gr.File(
+                    label="Upload Excel File",
+                    type="file",
+                    file_types=[".xlsx", ".xls"]
+                )
+                column_input = gr.Textbox(
+                    label="Location Column Name",
+                    value="dateline_locations",
+                    placeholder="Enter the column containing location names"
+                )
+                process_btn = gr.Button(
+                    "Process and Map",
+                    variant="primary"
+                )

+            with gr.Column(scale=2):
+                map_display = gr.HTML(
+                    label="Interactive Map",
+                    value="<div style='text-align:center;padding:20px;'>"
+                          "Map will appear here after processing"
+                          "</div>"
+                )
+                stats_output = gr.Textbox(
+                    label="Processing Statistics",
+                    interactive=False
+                )
+                download_output = gr.File(
+                    label="Download Processed Data",
+                    visible=False
+                )
+
+        # Configure button action
+        process_btn.click(
+            fn=process_data,
+            inputs=[file_input, column_input],
+            outputs=[map_display, stats_output, download_output]
+        )

+# Launch settings
 if __name__ == "__main__":
+    app.launch(
+        server_name="0.0.0.0",
+        server_port=7860,
+        share=False
+    )
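
For reference, a minimal sketch of how the reworked geocoding path behaves in isolation (this assumes the new app.py above is importable; "Paris" is purely an illustrative query, not from the commit): each unique location string hits Nominatim at most once, throttled by the RateLimiter, and repeat lookups are answered from the in-memory cache.

    geocoder = Geocoder()
    first = geocoder.get_coordinates("Paris")    # network call via RateLimiter (>= 1 s spacing, 2 retries)
    second = geocoder.get_coordinates("Paris")   # served from self.cache; no second Nominatim request
    assert first == second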