import streamlit as st
import os
import time
import json
import re
from openai import OpenAI
# Basic config
st.set_page_config(page_title="Forrestdale Drawing Viewer", layout="wide")
st.title("📐 Forrestdale Technical Drawing Assistant")

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
ASSISTANT_ID = "asst_DjvuWBc7tCvMbAhY7n1em4BZ"
if not OPENAI_API_KEY:
    st.error("❌ Missing OPENAI_API_KEY.")
    st.stop()

client = OpenAI(api_key=OPENAI_API_KEY)
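
# Reuse a single Assistants API thread per browser session so conversation context survives Streamlit reruns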
if "tech_thread_id" not in st.session_state: | |
thread = client.beta.threads.create() | |
st.session_state.tech_thread_id = thread.id | |
if "tech_messages" not in st.session_state: | |
st.session_state.tech_messages = [] | |
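
# Chat input; each user turn is stored in session state and the full history is re-rendered on every rerun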
prompt = st.chat_input("Ask about plans, drawings or components (e.g. Show me all electrical plans)")
if prompt:
    st.session_state.tech_messages.append({"role": "user", "content": prompt})

for msg in st.session_state.tech_messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# Fetch assistant reply
if st.session_state.tech_messages and st.session_state.tech_messages[-1]["role"] == "user":
    with st.spinner("⏳ Fetching results from assistant..."):
        try:
            client.beta.threads.messages.create(
                thread_id=st.session_state.tech_thread_id,
                role="user",
                content=st.session_state.tech_messages[-1]["content"]
            )
            run = client.beta.threads.runs.create(
                thread_id=st.session_state.tech_thread_id,
                assistant_id=ASSISTANT_ID
            )
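            # Poll the run until it reaches a terminal state; Assistants API runs execute asynchronously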
            while True:
                run_status = client.beta.threads.runs.retrieve(
                    thread_id=st.session_state.tech_thread_id,
                    run_id=run.id
                )
                if run_status.status in ["completed", "failed", "cancelled"]:
                    break
                time.sleep(1)

            if run_status.status != "completed":
                st.error("⚠️ Assistant run failed.")
            else:
                messages = client.beta.threads.messages.list(thread_id=st.session_state.tech_thread_id)
                # messages.list returns newest-first, so the first assistant entry is the latest reply
                for message in messages.data:
                    if message.role == "assistant":
                        reply_content = message.content[0].text.value.strip()
                        st.session_state.tech_messages.append({"role": "assistant", "content": reply_content})
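                        # The assistant is expected to return a JSON list of drawings, optionally wrapped in a ```json fence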
                        try:
                            match = re.search(r"```json\s*(.*?)```", reply_content, re.DOTALL)
                            json_str = match.group(1) if match else reply_content
                            results = json.loads(json_str)
                            if isinstance(results, list):
                                # Render each drawing as a card in a 4-column grid
                                cols = st.columns(4)
                                for idx, item in enumerate(results):
                                    with cols[idx % 4]:
                                        with st.container(border=True):
                                            st.markdown(f"### 📄 {item.get('drawing_number')} ({item.get('discipline')})", help=item.get("summary"))
                                            st.caption(item.get("summary"))
                                            with st.expander("🔍 View Drawing Details"):
                                                if item.get("question"):
                                                    st.markdown(f"**Question Match:** {item.get('question')}")
                                                if item.get("image"):
                                                    st.image(item.get("image"), caption=item.get("drawing_number"))
                                                elif item.get("images"):
                                                    for i, img in enumerate(item["images"]):
                                                        if img.startswith("http"):
                                                            st.image(img, caption=f"{item['drawing_number']} – Page {i+1}")
                        except Exception as parse_error:
                            st.warning("💡 Could not parse assistant response as JSON.")
                        break
        except Exception as e:
            st.error(f"❌ Error occurred: {e}")