# NOTE(review): the three lines that were here ("Spaces:" / "Sleeping" / "Sleeping")
# were non-code hosting-page residue, not Python; commented out so the file parses.
# app.py — Streamlit "Shared World Builder": a shared 3D world persisted as
# per-plot CSV files, rendered by an embedded index.html (Three.js client).
import streamlit as st
import streamlit.components.v1 as components
import os
import json
import pandas as pd
import uuid
import math
from streamlit_js_eval import streamlit_js_eval
import time  # For debugging or potential delays

# --- Constants ---
SAVE_DIR = "saved_worlds"          # directory holding one CSV per plot
PLOT_WIDTH = 50.0                  # world units per plot along X
PLOT_DEPTH = 50.0                  # world units per plot along Z
CSV_COLUMNS = ['obj_id', 'type', 'pos_x', 'pos_y', 'pos_z', 'rot_x', 'rot_y', 'rot_z', 'rot_order']

# --- Ensure Save Directory Exists ---
os.makedirs(SAVE_DIR, exist_ok=True)

# --- Helper Functions ---
# --- Caching Plot Metadata ---
# Cache plot list for an hour. The decorator is required: this function's
# cache is invalidated elsewhere via load_plot_metadata.clear(), which only
# exists on an @st.cache_data-wrapped function.
@st.cache_data(ttl=3600)
def load_plot_metadata():
    """Scan SAVE_DIR for plot_X*_Z*.csv files and return sorted plot metadata.

    Returns:
        list[dict]: one dict per valid plot with keys 'id', 'filename',
        'grid_x', 'grid_z', 'name', and the world-space offsets
        'x_offset' / 'z_offset' of the plot origin. Empty list on errors.
    """
    print(f"[{time.time():.2f}] Loading plot metadata...")
    plot_files = []
    try:
        plot_files = [f for f in os.listdir(SAVE_DIR) if f.endswith(".csv") and f.startswith("plot_X")]
    except FileNotFoundError:
        st.error(f"Save directory '{SAVE_DIR}' not found.")
        return []
    except Exception as e:
        st.error(f"Error listing save directory '{SAVE_DIR}': {e}")
        return []
    parsed_plots = []
    for filename in plot_files:
        try:
            # Skip files that hold at most a header row (header length + newline).
            file_path = os.path.join(SAVE_DIR, filename)
            if os.path.getsize(file_path) <= len(",".join(CSV_COLUMNS)) + 2:
                print(f"Skipping empty or header-only file: {filename}")
                # Optionally delete empty files? os.remove(file_path)
                continue
            # Filename pattern: plot_X<gx>_Z<gz>[_optional_name_words].csv
            parts = filename[:-4].split('_')
            grid_x = int(parts[1][1:])
            grid_z = int(parts[2][1:])
            plot_name = " ".join(parts[3:]) if len(parts) > 3 else f"Plot ({grid_x},{grid_z})"
            parsed_plots.append({
                'id': filename[:-4], 'filename': filename,
                'grid_x': grid_x, 'grid_z': grid_z, 'name': plot_name,
                'x_offset': grid_x * PLOT_WIDTH, 'z_offset': grid_z * PLOT_DEPTH
            })
        except FileNotFoundError:
            # Listed above but removed since — safety check, skip it.
            st.warning(f"File {filename} not found during metadata parsing.")
            continue
        except (IndexError, ValueError, OSError) as e:
            st.warning(f"Error parsing metadata from filename '{filename}': {e}. Skipping.")
            continue
    parsed_plots.sort(key=lambda p: (p['grid_x'], p['grid_z']))
    print(f"[{time.time():.2f}] Found {len(parsed_plots)} valid plots.")
    return parsed_plots
# --- Loading Objects (no cache on individual plots; handled by get_all_world_objects) ---
def load_single_plot_objects_relative(filename):
    """Load objects from one plot CSV, keeping coordinates plot-relative.

    Args:
        filename: CSV filename inside SAVE_DIR.

    Returns:
        list[dict]: one record per object with exactly CSV_COLUMNS keys;
        [] for missing, empty, or unusable files (an empty plot is normal).
    """
    file_path = os.path.join(SAVE_DIR, filename)
    try:
        # Check for an empty file BEFORE read_csv to avoid a pandas error.
        if not os.path.exists(file_path) or os.path.getsize(file_path) == 0:
            return []  # Non-existent or empty file: nothing to load
        df = pd.read_csv(file_path)
        if df.empty:
            return []
        # --- Data cleaning & defaulting ---
        # Ensure obj_id exists and is unique enough for merging.
        if 'obj_id' not in df.columns:
            df['obj_id'] = [str(uuid.uuid4()) for _ in range(len(df))]
        else:
            # Fill potential NaNs in obj_id with fresh UUIDs and force string type.
            df['obj_id'] = df['obj_id'].fillna(pd.Series([str(uuid.uuid4()) for _ in range(len(df))])).astype(str)
        # Essential geometry columns must be present, else skip the whole file.
        for col in ['type', 'pos_x', 'pos_y', 'pos_z']:
            if col not in df.columns:
                st.warning(f"CSV '{filename}' missing essential column '{col}'. Skipping file.")
                return []
        # Add defaults for optional rotation columns if they don't exist.
        for col, default in [('rot_x', 0.0), ('rot_y', 0.0), ('rot_z', 0.0), ('rot_order', 'XYZ')]:
            if col not in df.columns:
                df[col] = default
        # Fill NaNs in optional columns with defaults.
        df.fillna({'rot_x': 0.0, 'rot_y': 0.0, 'rot_z': 0.0, 'rot_order': 'XYZ'}, inplace=True)
        # Coerce numeric columns; unparsable values become NaN...
        for col in ['pos_x', 'pos_y', 'pos_z', 'rot_x', 'rot_y', 'rot_z']:
            df[col] = pd.to_numeric(df[col], errors='coerce')
        # ...and rows whose position failed conversion are dropped.
        df.dropna(subset=['pos_x', 'pos_y', 'pos_z'], inplace=True)
        # Ensure 'type' is string.
        df['type'] = df['type'].astype(str)
        # Emit records restricted to the canonical column set and order.
        return df[CSV_COLUMNS].to_dict('records')
    except pd.errors.EmptyDataError:
        return []  # Header-only/empty file caught by pandas — normal case
    except FileNotFoundError:
        # Should be handled by the os.path check above, but good fallback.
        return []
    except Exception as e:
        st.error(f"Error loading objects from {filename}: {e}")
        st.exception(e)  # Show full traceback in logs/console
        return []
# --- Cache the combined world state ---
# Cached so each rerun doesn't re-read every plot CSV. The decorator is
# required: callers invalidate via get_all_world_objects.clear(), which only
# exists on an @st.cache_data-wrapped function.
@st.cache_data
def get_all_world_objects():
    """Load ALL objects from ALL known plots, converted to world coordinates.

    Returns:
        list[dict]: objects shaped {'obj_id', 'type', 'position': {x,y,z},
        'rotation': {_x,_y,_z,_order}}. Duplicate obj_ids across plots are
        collapsed (last plot loaded wins, by dict assignment).
    """
    print(f"[{time.time():.2f}] Reloading ALL world objects from files...")
    all_objects = {}  # keyed by obj_id for auto-deduplication during load
    plots_meta = load_plot_metadata()  # Get the list of valid plots
    for plot in plots_meta:
        relative_objects = load_single_plot_objects_relative(plot['filename'])
        for obj in relative_objects:
            obj_id = obj.get('obj_id')
            if not obj_id:
                continue  # loader guarantees IDs, but stay defensive
            # Convert plot-relative coords to world coords via the plot offset.
            world_obj = {
                'obj_id': obj_id,
                'type': obj.get('type', 'Unknown'),
                'position': {
                    'x': obj.get('pos_x', 0.0) + plot['x_offset'],
                    'y': obj.get('pos_y', 0.0),
                    'z': obj.get('pos_z', 0.0) + plot['z_offset']
                },
                'rotation': {
                    '_x': obj.get('rot_x', 0.0),
                    '_y': obj.get('rot_y', 0.0),
                    '_z': obj.get('rot_z', 0.0),
                    '_order': obj.get('rot_order', 'XYZ')
                }
            }
            # Same obj_id in multiple plots: dict assignment means last wins.
            all_objects[obj_id] = world_obj
    world_list = list(all_objects.values())
    print(f"[{time.time():.2f}] Loaded {len(world_list)} total objects.")
    return world_list
def save_plot_data_merged(filename, new_objects_world_coords, plot_x_offset, plot_z_offset):
    """
    Loads existing data, merges new objects (world coords), saves back relative.
    Handles de-duplication based on obj_id (new objects overwrite).

    Args:
        filename: target plot CSV inside SAVE_DIR.
        new_objects_world_coords: list of dicts shaped like the client payload:
            {'obj_id', 'type', 'position': {x,y,z}, 'rotation': {_x,_y,_z,_order}}.
        plot_x_offset / plot_z_offset: world offsets of this plot's origin,
            subtracted so coordinates are stored plot-relative.

    Returns True on success, False otherwise.
    """
    file_path = os.path.join(SAVE_DIR, filename)
    print(f"[{time.time():.2f}] Merging and saving plot: {filename}")
    # 1. Load existing objects (relative coordinates), keyed by obj_id.
    existing_relative_objects = load_single_plot_objects_relative(filename)
    existing_objects_dict = {obj['obj_id']: obj for obj in existing_relative_objects if obj.get('obj_id')}
    print(f"Found {len(existing_objects_dict)} existing objects in {filename}.")
    # 2. Convert new objects to relative coordinates and add/overwrite in dict.
    new_object_count = 0
    for obj_world in new_objects_world_coords:
        obj_id = obj_world.get('obj_id')
        pos = obj_world.get('position')
        rot = obj_world.get('rotation')
        obj_type = obj_world.get('type')
        if not all([obj_id, pos, rot, obj_type]):
            st.warning(f"Skipping malformed new object during merge: {obj_world}")
            continue
        relative_obj = {
            'obj_id': obj_id,
            'type': obj_type,
            'pos_x': pos.get('x', 0.0) - plot_x_offset,
            'pos_y': pos.get('y', 0.0),
            'pos_z': pos.get('z', 0.0) - plot_z_offset,
            'rot_x': rot.get('_x', 0.0),
            'rot_y': rot.get('_y', 0.0),
            'rot_z': rot.get('_z', 0.0),
            'rot_order': rot.get('_order', 'XYZ')
        }
        existing_objects_dict[obj_id] = relative_obj  # Add or overwrite based on ID
        new_object_count += 1
    print(f"Added/updated {new_object_count} objects for {filename}.")
    # 3. Convert final dictionary back to list and save.
    final_relative_list = list(existing_objects_dict.values())
    try:
        if not final_relative_list:
            # Merge produced an empty plot: delete the file rather than save it.
            if os.path.exists(file_path):
                print(f"Resulting object list for {filename} is empty. Deleting file.")
                os.remove(file_path)
            else:
                print(f"Resulting object list for {filename} is empty. No file to save/delete.")
            # Caches must still be invalidated even when the file was deleted.
            load_plot_metadata.clear()
            get_all_world_objects.clear()
            return True  # An empty merge result is still a successful merge
        df = pd.DataFrame(final_relative_list, columns=CSV_COLUMNS)
        # Drop rows whose required fields went missing during merge/conversion.
        df.dropna(subset=['obj_id', 'type', 'pos_x', 'pos_y', 'pos_z'], inplace=True)
        df.to_csv(file_path, index=False)
        st.success(f"Saved {len(df)} total objects to {filename}")
        # --- CRITICAL: clear caches after a successful save ---
        load_plot_metadata.clear()
        get_all_world_objects.clear()
        print(f"[{time.time():.2f}] Caches cleared after saving {filename}.")
        return True
    except Exception as e:
        st.error(f"Failed to save merged plot data to {filename}: {e}")
        st.exception(e)
        return False
# --- Page Config ---
st.set_page_config(page_title="Shared World Builder (v2)", layout="wide")

# --- Initialize Session State ---
if 'selected_object' not in st.session_state:
    st.session_state.selected_object = 'None'
if 'js_save_payload' not in st.session_state:
    # Holds the result of the JS save call; consumed (popped) below.
    st.session_state.js_save_payload = None

# --- Load Initial Data for Page Load / Rerun ---
# Both calls hit their caches on reruns unless explicitly cleared.
initial_world_state_list = get_all_world_objects()
plots_metadata = load_plot_metadata()
# --- Sidebar ---
with st.sidebar:
    st.title("🏗️ World Controls")

    # --- Refresh Button ---
    st.header("World View")
    if st.button("🔄 Refresh World View", key="refresh_button"):
        st.info("Reloading world data...")
        load_plot_metadata.clear()
        get_all_world_objects.clear()
        # Clear potential JS payload trigger if refresh clicked mid-save?
        st.session_state.js_save_payload = None
        st.rerun()
    st.markdown("---")

    # Navigation (using cached metadata)
    st.header("Navigation (Plots)")
    st.caption("Click to teleport player to a plot.")
    max_cols = 2
    cols = st.columns(max_cols)
    col_idx = 0
    # Use the potentially updated plots_metadata.
    sorted_plots_for_nav = sorted(plots_metadata, key=lambda p: (p['grid_x'], p['grid_z']))
    for plot in sorted_plots_for_nav:
        button_label = f"➡️ {plot.get('name', plot['id'])} ({plot['grid_x']},{plot['grid_z']})"
        if cols[col_idx].button(button_label, key=f"nav_{plot['id']}"):
            # Teleport target: centre of the clicked plot.
            target_x = plot['x_offset'] + PLOT_WIDTH / 2
            target_z = plot['z_offset'] + PLOT_DEPTH / 2
            try:
                js_code = f"teleportPlayer({target_x}, {target_z});"
                streamlit_js_eval(js_code=js_code, key=f"teleport_{plot['id']}")
            except Exception as e:
                st.error(f"Failed to send teleport command: {e}")
        col_idx = (col_idx + 1) % max_cols
    st.markdown("---")

    # Object Placement
    st.header("Place Objects")
    object_types = ["None", "Simple House", "Tree", "Rock", "Fence Post"]
    current_selection = st.session_state.selected_object
    if current_selection not in object_types:
        current_selection = "None"
    current_object_index = object_types.index(current_selection)
    selected_object_type_widget = st.selectbox(
        "Select Object:", options=object_types, index=current_object_index, key="selected_object_widget"
    )
    if selected_object_type_widget != st.session_state.selected_object:
        st.session_state.selected_object = selected_object_type_widget
        # Push the new tool selection to JS immediately, without a full rerun.
        try:
            js_update_selection = f"updateSelectedObjectType({json.dumps(st.session_state.selected_object)});"
            streamlit_js_eval(js_code=js_update_selection, key="update_selection_js")
        except Exception as e:
            st.warning(f"Could not push selection update to JS: {e}")
        # Optional: Trigger a rerun if immediate JS update isn't enough
        # st.rerun()
    st.markdown("---")

    # --- Saving ---
    st.header("Save Work")
    st.caption("Merges your newly placed objects into the shared world plot file.")
    if st.button("💾 Save My New Objects", key="save_button"):
        # Ask the client for its unsaved objects AND the player position
        # (position determines which plot file receives the merge).
        js_get_data_code = "getSaveDataAndPosition();"
        # Store the result in session state; processing happens below.
        st.session_state.js_save_payload = streamlit_js_eval(js_code=js_get_data_code, key="js_save_processor", want_result=True)
        # No automatic rerun here, processing happens below
# --- Process Save Data ---
# Pop (consume) the payload so a later rerun can't re-process the same save.
save_data_from_js = st.session_state.pop("js_save_payload", None)
if save_data_from_js is not None:
    st.info("Received save data from client...")
    save_processed_successfully = False
    try:
        # Expecting { playerPosition: {x,y,z}, objectsToSave: [...] }
        payload = json.loads(save_data_from_js)
        if isinstance(payload, dict) and 'playerPosition' in payload and 'objectsToSave' in payload:
            player_pos = payload['playerPosition']
            # These are the NEW objects placed by the user, in WORLD coordinates.
            objects_to_save_world_coords = payload['objectsToSave']
            if not isinstance(objects_to_save_world_coords, list):
                st.error("Invalid 'objectsToSave' format received (expected list).")
            elif not objects_to_save_world_coords:
                st.warning("Save clicked, but there were no new objects reported by the client.")
            else:
                # Determine target plot from the player's grid cell.
                target_grid_x = math.floor(player_pos.get('x', 0.0) / PLOT_WIDTH)
                target_grid_z = math.floor(player_pos.get('z', 0.0) / PLOT_DEPTH)
                target_filename = f"plot_X{target_grid_x}_Z{target_grid_z}.csv"
                target_plot_x_offset = target_grid_x * PLOT_WIDTH
                target_plot_z_offset = target_grid_z * PLOT_DEPTH
                st.write(f"Saving {len(objects_to_save_world_coords)} new object(s) to plot: {target_filename} (Player at: x={player_pos.get('x', 0):.1f}, z={player_pos.get('z', 0):.1f})")
                # --- Call the MERGE save function ---
                save_ok = save_plot_data_merged(
                    target_filename,
                    objects_to_save_world_coords,
                    target_plot_x_offset,
                    target_plot_z_offset
                )
                if save_ok:
                    save_processed_successfully = True
                    # Caches are cleared inside save_plot_data_merged now.
                    # Tell JS to clear its local unsaved state (newlyPlacedObjects + sessionStorage).
                    try:
                        streamlit_js_eval(js_code="resetNewlyPlacedObjects();", key="reset_js_state_after_save")
                        st.success("Changes saved and merged successfully. Client state reset.")
                    except Exception as js_e:
                        st.warning(f"Save successful, but could not reset JS state: {js_e}")
                else:
                    st.error(f"Failed to save merged plot data to file: {target_filename}")
        else:
            st.error("Invalid save payload structure received from client.")
            print("Received payload structure:", type(payload), "Keys:", payload.keys() if isinstance(payload, dict) else "N/A")
    except json.JSONDecodeError:
        st.error("Failed to decode save data from client (invalid JSON).")
        print("Received raw data:", save_data_from_js)
    except Exception as e:
        st.error(f"Error processing save: {e}")
        st.exception(e)
    # Rerun after processing the save attempt to reflect changes / clear messages / reload data.
    st.rerun()
# --- Main Area ---
st.header("Infinite Shared 3D World")
st.caption("Place objects, then 'Save My New Objects'. Use 'Refresh World View' to see others' saved changes.")

# --- Load and Prepare HTML ---
html_file_path = 'index.html'
html_content_with_state = None
try:
    with open(html_file_path, 'r', encoding='utf-8') as f:
        html_template = f.read()
    # --- Inject Python state into JavaScript ---
    # Uses the data loaded (potentially from cache) at the start of this run.
    # Double braces {{ }} escape literal braces inside the f-string.
    js_injection_script = f"""
    <script>
        // Use the global state loaded at the start of this Streamlit script run
        window.ALL_INITIAL_OBJECTS = {json.dumps(initial_world_state_list)};
        window.PLOTS_METADATA = {json.dumps(plots_metadata)}; // Plot info for ground generation etc.
        window.SELECTED_OBJECT_TYPE = {json.dumps(st.session_state.selected_object)}; // Current user's tool
        window.PLOT_WIDTH = {json.dumps(PLOT_WIDTH)};
        window.PLOT_DEPTH = {json.dumps(PLOT_DEPTH)};
        console.log("Streamlit State Injected:", {{
            selectedObject: window.SELECTED_OBJECT_TYPE,
            initialObjectsCount: window.ALL_INITIAL_OBJECTS ? window.ALL_INITIAL_OBJECTS.length : 0,
            plotCount: window.PLOTS_METADATA ? window.PLOTS_METADATA.length : 0,
            plotWidth: window.PLOT_WIDTH,
            plotDepth: window.PLOT_DEPTH
        }});
    </script>
    """
    # Insert the injection script just before the closing </head> tag.
    html_content_with_state = html_template.replace('</head>', js_injection_script + '\n</head>', 1)

    # --- Embed HTML Component ---
    components.html(
        html_content_with_state,
        height=750,  # Adjust as needed
        scrolling=False
    )
except FileNotFoundError:
    st.error(f"CRITICAL ERROR: Could not find the file '{html_file_path}'.")
    st.warning(f"Make sure `{html_file_path}` is in the same directory as `app.py` and `{SAVE_DIR}` exists.")
except Exception as e:
    st.error(f"A critical error occurred during HTML preparation or component rendering: {e}")
    st.exception(e)