File size: 20,795 Bytes
41c1a97
 
 
c6b8a39
 
 
 
c910728
08066b1
c910728
c6b8a39
 
 
c910728
 
c6b8a39
c910728
c6b8a39
 
 
 
c910728
c6b8a39
c910728
 
 
 
 
 
c6b8a39
a9aa314
c910728
 
c6b8a39
a9aa314
a2141e7
c6b8a39
 
 
 
a2141e7
c6b8a39
 
a9aa314
c6b8a39
 
 
c910728
 
 
c6b8a39
a9aa314
c6b8a39
c910728
c6b8a39
 
 
 
 
c910728
c6b8a39
 
 
 
a9aa314
c6b8a39
 
a2141e7
c6b8a39
c910728
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c6b8a39
 
a9aa314
c910728
 
 
 
 
 
 
 
a9aa314
c910728
 
c6b8a39
a9aa314
c910728
 
 
 
 
 
 
 
 
c6b8a39
c910728
 
 
c6b8a39
c910728
 
 
 
a9aa314
c6b8a39
a9aa314
c910728
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c6b8a39
c910728
 
 
 
 
c6b8a39
 
c910728
a9aa314
c910728
c6b8a39
a9aa314
c910728
 
 
 
 
 
 
 
 
 
 
 
c6b8a39
c910728
 
 
c6b8a39
a9aa314
7825ef7
 
a9aa314
c910728
c6b8a39
 
c910728
c6b8a39
 
 
 
 
 
c910728
 
c6b8a39
c910728
c6b8a39
 
c910728
c6b8a39
 
 
a9aa314
c910728
c6b8a39
 
c910728
 
 
 
 
 
 
a9aa314
 
 
c910728
c6b8a39
c910728
 
 
 
 
 
 
 
 
c6b8a39
 
a9aa314
c910728
c6b8a39
c910728
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a9aa314
 
c910728
a9aa314
 
7825ef7
c910728
 
a9aa314
c910728
 
 
 
 
 
a9aa314
 
c910728
 
 
 
 
 
 
a9aa314
c910728
 
a9aa314
c910728
a9aa314
c910728
 
a9aa314
 
c910728
a9aa314
a2141e7
a9aa314
 
 
c910728
 
a9aa314
c910728
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a9aa314
c6b8a39
 
 
c910728
a9aa314
 
 
 
 
7825ef7
a9aa314
 
 
 
c910728
89b923d
7825ef7
c910728
 
 
a9aa314
 
 
c910728
 
 
a9aa314
c910728
a9aa314
 
c910728
 
a9aa314
4fa9b7a
 
c910728
a9aa314
 
 
 
 
 
 
 
 
c6b8a39
 
a9aa314
89b923d
a9aa314
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
# app.py
# Streamlit entry point for the shared 3D world builder. The server holds the
# authoritative world state (CSV files under SAVE_DIR); browsers render it via
# an embedded index.html and talk back through streamlit_js_eval.
import streamlit as st
import streamlit.components.v1 as components
import os
import json
import pandas as pd
import uuid
import math
from streamlit_js_eval import streamlit_js_eval # For JS communication
import time # For potential throttling if needed

# --- Constants ---
SAVE_DIR = "saved_worlds"        # Directory holding one CSV per world plot
PLOT_WIDTH = 50.0                # World-space width (x) of a single plot
PLOT_DEPTH = 50.0                # World-space depth (z) of a single plot
CSV_COLUMNS = ['obj_id', 'type', 'pos_x', 'pos_y', 'pos_z', 'rot_x', 'rot_y', 'rot_z', 'rot_order']
STATE_POLL_INTERVAL_MS = 5000 # How often clients ask for updates (milliseconds)

# --- Ensure Save Directory Exists ---
os.makedirs(SAVE_DIR, exist_ok=True)

# --- Server-Side State Management ---

# Global lock could be useful for more complex state modification,
# but for simple file writes + cache clear, Python's GIL might suffice.
# Add `import threading` if using the lock.
# state_lock = threading.Lock()

@st.cache_data(ttl=3600)  # Plot list changes rarely; cache for an hour.
def load_plot_metadata():
    """Scan SAVE_DIR for plot_X<gx>_Z<gz>[_name].csv files and return metadata.

    Returns a list of dicts sorted by (grid_x, grid_z), each containing:
    'id', 'filename', 'grid_x', 'grid_z', 'name', 'x_offset', 'z_offset'
    (the offsets are the plot's world-space origin). Returns an empty list
    (after showing a Streamlit error) if SAVE_DIR is missing or unreadable.
    """
    try:
        plot_files = [f for f in os.listdir(SAVE_DIR) if f.endswith(".csv") and f.startswith("plot_X")]
    except FileNotFoundError:
        st.error(f"Save directory '{SAVE_DIR}' not found.")
        return []
    except Exception as e:
        st.error(f"Error listing save directory '{SAVE_DIR}': {e}")
        return []

    parsed_plots = []
    for filename in plot_files:
        try:
            # Filename pattern: plot_X<gx>_Z<gz>[_optional_name_words].csv
            parts = filename[:-4].split('_')
            grid_x = int(parts[1][1:])  # strip leading 'X'
            grid_z = int(parts[2][1:])  # strip leading 'Z'
            plot_name = " ".join(parts[3:]) if len(parts) > 3 else f"Plot ({grid_x},{grid_z})"

            parsed_plots.append({
                'id': filename[:-4],
                'filename': filename,
                'grid_x': grid_x,
                'grid_z': grid_z,
                'name': plot_name,
                # World-space origin of this plot.
                'x_offset': grid_x * PLOT_WIDTH,
                'z_offset': grid_z * PLOT_DEPTH
            })
        except (IndexError, ValueError):
            # Fix: name the offending file so it can be found and repaired
            # (the message previously printed a "(unknown)" placeholder).
            st.warning(f"Could not parse grid coordinates from filename: {filename}. Skipping.")
            continue

    parsed_plots.sort(key=lambda p: (p['grid_x'], p['grid_z']))
    return parsed_plots


# --- Use cache_data to hold the authoritative world state ---
# This function loads *all* objects from *all* known plots. Streamlit re-runs
# it automatically when its cache is cleared (done after every save).
@st.cache_data(show_spinner=False)  # A spinner would flash on every poll.
def get_authoritative_world_state():
    """Load ALL objects from ALL saved plot files into one dict.

    Returns a dict keyed by obj_id; each value is a JS-friendly dict with
    'obj_id', 'type', 'position' {x, y, z} (world coordinates, i.e. plot
    offsets already applied) and 'rotation' {_x, _y, _z, _order}.

    Cached by Streamlit: call .clear() after writing plot files so every
    client's next poll sees the new state.
    """
    print("--- Reloading Authoritative World State from Files ---")
    all_objects = {}  # obj_id -> object dict, for O(1) lookup/update
    plots_meta = load_plot_metadata()  # Get the list of plots first

    essential_cols = ['obj_id', 'type', 'pos_x', 'pos_y', 'pos_z']

    for plot in plots_meta:
        file_path = os.path.join(SAVE_DIR, plot['filename'])
        try:
            # An empty file is a valid (object-less) plot; skip quietly.
            if os.path.getsize(file_path) == 0:
                print(f"Skipping empty plot file: {plot['filename']}")
                continue

            df = pd.read_csv(file_path)

            if df.empty:
                continue
            missing = [col for col in essential_cols if col not in df.columns]
            if missing:
                # Fix: the old fallback called dropna(subset=...) with columns
                # that do not exist, which raises KeyError in pandas — the file
                # was never partially processed. Skip it explicitly instead.
                st.warning(f"CSV '{plot['filename']}' missing essential columns {missing}. Skipping file.")
                continue
            # Drop rows that have the columns but lack a required value.
            df = df.dropna(subset=essential_cols)

            # Add defaults for optional rotation columns that are absent entirely.
            for col, default in [('rot_x', 0.0), ('rot_y', 0.0), ('rot_z', 0.0), ('rot_order', 'XYZ')]:
                if col not in df.columns:
                    df[col] = default

            # obj_id is used as a dict key; normalize to string.
            df['obj_id'] = df['obj_id'].astype(str)
            # Fill per-row gaps in the optional columns.
            df.fillna({'rot_x': 0.0, 'rot_y': 0.0, 'rot_z': 0.0, 'rot_order': 'XYZ'}, inplace=True)

            for _, row in df.iterrows():
                obj_data = row.to_dict()
                obj_id = obj_data.get('obj_id')
                if not obj_id:  # Should always exist by now, but be defensive.
                    st.warning(f"Skipping object with missing ID in {plot['filename']}")
                    continue

                # CSV positions are relative to the plot origin; convert to world.
                obj_data['pos_x'] += plot['x_offset']
                obj_data['pos_z'] += plot['z_offset']

                # Standardized structure consumed by the JS client.
                all_objects[obj_id] = {
                    'obj_id': obj_id,
                    'type': obj_data['type'],
                    'position': {'x': obj_data['pos_x'], 'y': obj_data['pos_y'], 'z': obj_data['pos_z']},
                    'rotation': {'_x': obj_data['rot_x'], '_y': obj_data['rot_y'], '_z': obj_data['rot_z'], '_order': obj_data['rot_order']}
                }

        except FileNotFoundError:
            st.error(f"File not found during object load: {plot['filename']}")
        except pd.errors.EmptyDataError:
            print(f"Plot file is empty (valid): {plot['filename']}")  # Normal case
        except Exception as e:
            st.error(f"Error loading objects from {plot['filename']}: {e}")
            st.exception(e)  # Traceback for debugging

    print(f"--- Loaded {len(all_objects)} objects into authoritative state ---")
    # Dict is ideal server-side; callers convert to a list for JSON as needed.
    return all_objects


def save_new_objects_to_plots(objects_to_save):
    """
    Persist a list of NEW objects (world coordinates) into their plot CSVs.

    Each object is routed to the plot file covering its (x, z) position and
    stored with coordinates relative to that plot's origin. Existing files
    are merged (the newly added object wins on obj_id conflict); missing
    files are created. On success, clears the world-state and plot-metadata
    caches so other clients pick up the change on their next poll.

    Returns True if all touched plots saved cleanly, False otherwise.
    """
    if not isinstance(objects_to_save, list):
        st.error("Invalid data format received for saving (expected a list).")
        return False

    # Group objects by the plot they belong to.
    objects_by_plot = {}  # (grid_x, grid_z) -> list of plot-relative object dicts

    for obj in objects_to_save:
        pos = obj.get('position')
        obj_id = obj.get('obj_id')
        obj_type = obj.get('type')
        rot = obj.get('rotation', {'_x': 0.0, '_y': 0.0, '_z': 0.0, '_order': 'XYZ'})  # Default rotation

        if not pos or not obj_id or not obj_type:
            st.warning(f"Skipping malformed object during save prep: {obj}")
            continue

        # Which plot covers this world position?
        grid_x = math.floor(pos.get('x', 0.0) / PLOT_WIDTH)
        grid_z = math.floor(pos.get('z', 0.0) / PLOT_DEPTH)
        plot_key = (grid_x, grid_z)

        # Store positions relative to the plot origin.
        relative_x = pos['x'] - (grid_x * PLOT_WIDTH)
        relative_z = pos['z'] - (grid_z * PLOT_DEPTH)

        relative_obj = {
            'obj_id': obj_id,
            'type': obj_type,
            'pos_x': relative_x,
            'pos_y': pos.get('y', 0.0),
            'pos_z': relative_z,
            'rot_x': rot.get('_x', 0.0),
            'rot_y': rot.get('_y', 0.0),
            'rot_z': rot.get('_z', 0.0),
            'rot_order': rot.get('_order', 'XYZ')
        }

        objects_by_plot.setdefault(plot_key, []).append(relative_obj)

    # --- Save each plot ---
    save_successful = True
    saved_files_count = 0
    new_files_created = 0  # Informational only

    # with state_lock:  # Optional lock if race conditions become an issue
    for (grid_x, grid_z), relative_objects in objects_by_plot.items():
        filename = f"plot_X{grid_x}_Z{grid_z}.csv"
        file_path = os.path.join(SAVE_DIR, filename)
        is_new_file = not os.path.exists(file_path)

        try:
            new_df = pd.DataFrame(relative_objects, columns=CSV_COLUMNS)

            if is_new_file:
                # Create new file.
                new_df.to_csv(file_path, index=False)
                # Fix: messages previously printed a "(unknown)" placeholder
                # instead of the actual plot filename.
                st.info(f"Created new plot file: {filename} with {len(relative_objects)} objects.")
                new_files_created += 1
            else:
                # Merge with the existing file: read, concat, de-duplicate on
                # obj_id (keeping the newly added version), then rewrite.
                try:
                    existing_df = pd.read_csv(file_path)
                    # obj_id must be string on both sides for the comparison.
                    existing_df['obj_id'] = existing_df['obj_id'].astype(str)
                    new_df['obj_id'] = new_df['obj_id'].astype(str)

                    combined_df = pd.concat([existing_df, new_df]).drop_duplicates(subset=['obj_id'], keep='last')

                except (FileNotFoundError, pd.errors.EmptyDataError):
                    # File vanished or was emptied between the existence check
                    # and the read; fall back to creating it fresh.
                    print(f"Warning: File {filename} was empty or missing on read, creating.")
                    combined_df = new_df
                except Exception as read_e:
                    st.error(f"Error reading existing file {filename} for merge: {read_e}")
                    save_successful = False
                    continue  # Skip this plot

                combined_df.to_csv(file_path, index=False)
                st.info(f"Updated plot file: {filename}. Total objects now: {len(combined_df)}")

            saved_files_count += 1

        except Exception as e:
            st.error(f"Failed to save plot data to {filename}: {e}")
            st.exception(e)
            save_successful = False

    if save_successful and saved_files_count > 0:
        st.success(f"Saved {len(objects_to_save)} objects across {saved_files_count} plot file(s).")
        # CRITICAL: clear caches so other users / the next poll see the update.
        get_authoritative_world_state.clear()
        load_plot_metadata.clear()  # Plot list may have grown if new files were made
        print("--- Server caches cleared after successful save ---")
        return True
    elif saved_files_count == 0 and len(objects_to_save) > 0:
        st.warning("Save requested, but no valid objects were processed.")
        return False  # Nothing was actually saved
    else:
        # Errors occurred during saving.
        return False


# --- Page Config ---
st.set_page_config(page_title="Shared World Builder", layout="wide")

# --- Initialize Session State ---
# Keep track of the *selected* object type for placement (per user)
if 'selected_object' not in st.session_state:
    st.session_state.selected_object = 'None'
# Store the result from the JS save call (set by the sidebar Save button,
# consumed by the save-processing section further down this script)
if 'js_save_payload' not in st.session_state:
    st.session_state.js_save_payload = None
# Store the result from the JS polling call (less critical to persist)
# if 'js_poll_result' not in st.session_state:
#     st.session_state.js_poll_result = None # Might not need this server-side

# --- Load Initial Data for THIS Client ---
# Load metadata for sidebar navigation
plots_metadata = load_plot_metadata()
# Get the current authoritative state for initial injection into index.html
initial_world_state_dict = get_authoritative_world_state()
initial_world_state_list = list(initial_world_state_dict.values()) # Convert to list for JS

# --- Sidebar ---
# Per-user controls: plot navigation (teleport), object-type selection for
# placement, and the save trigger. All JS interaction goes through
# streamlit_js_eval with unique keys.
with st.sidebar:
    st.title("🏗️ World Controls")

    # Navigation (using cached metadata)
    st.header("Navigation (Plots)")
    st.caption("Click to teleport player to a plot.")
    max_cols = 2
    cols = st.columns(max_cols)
    col_idx = 0
    sorted_plots_for_nav = sorted(plots_metadata, key=lambda p: (p['grid_x'], p['grid_z']))
    for plot in sorted_plots_for_nav:
        button_label = f"➡️ {plot.get('name', plot['id'])} ({plot['grid_x']},{plot['grid_z']})"
        if cols[col_idx].button(button_label, key=f"nav_{plot['id']}"):
            target_x = plot['x_offset'] + PLOT_WIDTH / 2 # Center of plot
            target_z = plot['z_offset'] + PLOT_DEPTH / 2
            try:
                # teleportPlayer is expected to be defined by index.html's JS.
                js_code = f"teleportPlayer({target_x}, {target_z});"
                streamlit_js_eval(js_code=js_code, key=f"teleport_{plot['id']}")
            except Exception as e:
                st.error(f"Failed to send teleport command: {e}")
        col_idx = (col_idx + 1) % max_cols

    st.markdown("---")

    # Object Placement (per-user selection)
    st.header("Place Objects")
    object_types = ["None", "Simple House", "Tree", "Rock", "Fence Post"]
    # Ensure current state selection is valid, default to None if not
    current_selection = st.session_state.selected_object
    if current_selection not in object_types:
        current_selection = "None"
        st.session_state.selected_object = "None" # Correct invalid state
    current_object_index = object_types.index(current_selection)

    selected_object_type_widget = st.selectbox(
        "Select Object:", options=object_types, index=current_object_index, key="selected_object_widget"
    )
    # Update session state ONLY if the widget's value changes
    if selected_object_type_widget != st.session_state.selected_object:
        st.session_state.selected_object = selected_object_type_widget
        # No rerun needed here, JS will pick up the change via injected state on next interaction/poll
        # Or we can force a JS update immediately:
        try:
             js_update_selection = f"updateSelectedObjectType({json.dumps(st.session_state.selected_object)});"
             streamlit_js_eval(js_code=js_update_selection, key="update_selection_js")
        except Exception as e:
             st.warning(f"Could not push selection update to JS: {e}")


    st.markdown("---")

    # Saving (triggers JS to send data)
    st.header("Save Work")
    st.caption("Saves objects you've placed since your last save.")
    if st.button("💾 Save My New Objects", key="save_button"):
        # Trigger JS to get ONLY the newly placed objects data
        # We don't need player position here anymore, save logic handles it based on obj pos
        js_get_data_code = "getNewlyPlacedObjectsForSave();"
        # Use 'want_result=True' to get the data back into python state
        st.session_state.js_save_payload = streamlit_js_eval(
            js_code=js_get_data_code,
            key="js_save_processor",
            want_result=True # Make sure we get the return value
        )
        # No automatic rerun here - we process the result below

# --- Process Save Data (if triggered) ---
# Runs on the rerun after the sidebar Save button stored the JS result.
save_data_from_js = st.session_state.get("js_save_payload", None)

if save_data_from_js is not None:
    st.session_state.js_save_payload = None # Consume the trigger
    st.info("Received save data from client...")
    save_processed_successfully = False
    try:
        # Expecting a JSON string representing a LIST of new objects
        new_objects = json.loads(save_data_from_js)

        if isinstance(new_objects, list):
            if not new_objects:
                st.warning("Save clicked, but there were no new objects to save.")
            else:
                # Call the function to save these objects to their plots
                save_ok = save_new_objects_to_plots(new_objects)

                if save_ok:
                    # Tell JS to clear its local list of newly placed objects
                    try:
                        streamlit_js_eval(js_code="resetNewlyPlacedObjects();", key="reset_js_state_after_save")
                        st.success("Changes saved successfully and client state reset.")
                        save_processed_successfully = True
                        # Short delay maybe? To allow caches to potentially clear before rerun?
                        # time.sleep(0.1)
                    except Exception as js_e:
                        st.warning(f"Save successful, but could not reset JS state: {js_e}")
                        # State might be slightly off until next poll/refresh
                else:
                    st.error("Failed to save new objects to plot files. Check logs.")
        else:
            st.error(f"Invalid save payload structure received (expected list): {type(new_objects)}")
            print("Received payload:", save_data_from_js)

    except json.JSONDecodeError:
        st.error("Failed to decode save data from client (was not valid JSON).")
        print("Received raw data:", save_data_from_js)
    except Exception as e:
        st.error(f"Error processing save: {e}")
        st.exception(e)

    # Rerun if save was processed (successfully or not) to update sidebar/messages
    # and potentially reload data if caches were cleared
    if save_processed_successfully:
        # Force rerun to ensure the client gets updated state eventually
         st.rerun()
    # No rerun if save failed, keep message onscreen

# --- Provide Endpoint for JS Polling ---
# This uses streamlit_js_eval in reverse: JS calls a Python function.
# We define a key that JS will use to trigger this.
# The function returns the *current* authoritative state.
# NOTE(review): the embedded JS passes the Python source string
# "get_authoritative_world_state()" back through streamlit_js_eval; whether
# the library evaluates Python strings from the browser like this is not
# shown here — confirm against the streamlit_js_eval documentation.
poll_data = streamlit_js_eval(
    js_code="""
        // Define function in JS global scope if not already defined
        if (typeof window.requestStateUpdate !== 'function') {
            window.requestStateUpdate = () => {
                // This returns a Promise that resolves with the Python return value
                return streamlit_js_eval("get_authoritative_world_state()", want_result=True, key="get_world_state_poll");
            }
        }
        // Return something small just to indicate setup is done, or null
        null;
    """,
    key="setup_poll_function" # Unique key for this setup code
)

# This part *executes* the Python function when JS calls it via the key "get_world_state_poll"
# We use DUMPS_FUNC for potentially large JSON payloads
if 'get_world_state_poll' in st.session_state:
    print(f"Polling request received at {time.time()}")
    world_state_dict = get_authoritative_world_state()
    # Convert dict back to list for sending to JS
    world_state_list = list(world_state_dict.values())
    st.session_state.get_world_state_poll = world_state_list # Set the result for JS to pick up
    print(f"Responding to poll with {len(world_state_list)} objects.")


# --- Main Area ---
# Renders the 3D world: reads index.html, injects the current server state
# as window.* globals just before </head>, and embeds it as a component.
st.header("Infinite Shared 3D World")
st.caption(f"World state updates every {STATE_POLL_INTERVAL_MS / 1000}s. Use sidebar 'Save' to commit your new objects.")

# --- Load and Prepare HTML ---
html_file_path = 'index.html'
html_content_with_state = None

try:
    with open(html_file_path, 'r', encoding='utf-8') as f:
        html_template = f.read()

    # --- Inject Python state into JavaScript ---
    # Initial world state, plot metadata, the user's selected tool, and the
    # layout constants are exposed on `window` for index.html's scripts.
    js_injection_script = f"""
<script>
    // Initial state (authoritative at the time of page load)
    window.INITIAL_WORLD_STATE = {json.dumps(initial_world_state_list)};
    window.PLOTS_METADATA = {json.dumps(plots_metadata)}; // Plot info for ground generation etc.
    window.SELECTED_OBJECT_TYPE = {json.dumps(st.session_state.selected_object)};
    window.PLOT_WIDTH = {json.dumps(PLOT_WIDTH)};
    window.PLOT_DEPTH = {json.dumps(PLOT_DEPTH)};
    window.STATE_POLL_INTERVAL_MS = {json.dumps(STATE_POLL_INTERVAL_MS)};

    console.log("Streamlit Initial State Injected:", {{
        selectedObject: window.SELECTED_OBJECT_TYPE,
        initialObjectsCount: window.INITIAL_WORLD_STATE ? window.INITIAL_WORLD_STATE.length : 0,
        plotCount: window.PLOTS_METADATA ? window.PLOTS_METADATA.length : 0,
        plotWidth: window.PLOT_WIDTH,
        plotDepth: window.PLOT_DEPTH,
        pollInterval: window.STATE_POLL_INTERVAL_MS
    }});
</script>
"""
    # Insert the script just before the first closing </head> tag.
    html_content_with_state = html_template.replace('</head>', js_injection_script + '\n</head>', 1)

    # --- Embed HTML Component ---
    components.html(
        html_content_with_state,
        height=750,
        scrolling=False
    )

except FileNotFoundError:
    st.error(f"CRITICAL ERROR: Could not find the file '{html_file_path}'.")
    st.warning(f"Make sure `{html_file_path}` is in the same directory as `app.py` and `{SAVE_DIR}` exists.")
except Exception as e:
    # Fix: grammar in the user-facing message ("An critical" -> "A critical").
    st.error(f"A critical error occurred during HTML preparation or component rendering: {e}")
    st.exception(e)