Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
# app.py - FactoryRAG+:
|
2 |
|
3 |
import streamlit as st
|
4 |
import pandas as pd
|
@@ -7,41 +7,54 @@ from sentence_transformers import SentenceTransformer
|
|
7 |
from transformers import pipeline
|
8 |
from sklearn.ensemble import IsolationForest
|
9 |
|
10 |
-
# Page config
|
11 |
st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="π§ ", layout="wide")
|
12 |
|
13 |
-
#
|
14 |
st.markdown("""
|
15 |
-
<
|
16 |
-
|
17 |
-
|
18 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
19 |
""", unsafe_allow_html=True)
|
20 |
|
21 |
# Load models
|
22 |
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
|
23 |
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
|
24 |
|
25 |
-
#
|
26 |
-
st.sidebar.
|
27 |
-
uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])
|
28 |
|
29 |
if uploaded_file:
|
30 |
df = pd.read_csv(uploaded_file)
|
31 |
numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
|
32 |
-
st.success("β
|
33 |
|
34 |
-
|
35 |
-
st.
|
36 |
-
st.dataframe(df.head())
|
37 |
|
38 |
-
# Convert to text chunks
|
39 |
def convert_to_chunks(df):
|
40 |
-
|
41 |
-
for idx, row in df.iterrows():
|
42 |
-
log_text = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
|
43 |
-
chunks.append(log_text)
|
44 |
-
return chunks
|
45 |
|
46 |
if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
|
47 |
chunks = convert_to_chunks(df)
|
@@ -49,26 +62,28 @@ if uploaded_file:
|
|
49 |
st.session_state.chunks = chunks
|
50 |
st.session_state.embeddings = embeddings
|
51 |
|
52 |
-
#
|
53 |
-
st.markdown("###
|
54 |
iso = IsolationForest(contamination=0.02)
|
55 |
labels = iso.fit_predict(df[numeric_cols])
|
56 |
-
df['
|
57 |
-
|
|
|
58 |
|
59 |
-
#
|
60 |
-
st.markdown("### π¬
|
61 |
roles = {
|
62 |
-
"Operator": "You are a machine operator.
|
63 |
-
"Maintenance": "You are a maintenance technician.
|
64 |
-
"Engineer": "You are a
|
65 |
}
|
66 |
-
|
|
|
67 |
|
68 |
if 'chat_history' not in st.session_state:
|
69 |
st.session_state.chat_history = []
|
70 |
|
71 |
-
user_input = st.text_input("π¨οΈ Ask about
|
72 |
|
73 |
if user_input:
|
74 |
query_vec = EMBED_MODEL.encode([user_input])[0]
|
@@ -76,7 +91,7 @@ if uploaded_file:
|
|
76 |
top_idxs = np.argsort(sims)[-3:][::-1]
|
77 |
context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
|
78 |
system_prompt = roles[role]
|
79 |
-
full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
|
80 |
reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
|
81 |
|
82 |
st.session_state.chat_history.append((f"π€ You ({role})", user_input))
|
@@ -86,4 +101,4 @@ if uploaded_file:
|
|
86 |
st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
|
87 |
|
88 |
else:
|
89 |
-
st.info("π
|
|
|
1 |
+
# app.py - FactoryRAG+: AI Assistant with Real-Time Functional Status by Role

import streamlit as st
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
from transformers import pipeline
from sklearn.ensemble import IsolationForest
|
9 |
|
10 |
+
# Page config — must be the first Streamlit call in the script.
# NOTE(review): page_icon emoji restored from mojibake ("π§ " is the
# UTF-8 byte sequence for 🧠 rendered through the wrong codec).
st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="🧠", layout="wide")
|
12 |
|
13 |
+
# Custom dark theme styling applied globally via raw CSS.
# NOTE(review): literal reconstructed from a whitespace-mangled scrape;
# CSS is whitespace-insensitive, so formatting here is normalized.
st.markdown("""
    <style>
    html, body, [class*="css"] {
        font-family: 'Segoe UI', sans-serif;
        background-color: #0f1117;
        color: #f0f0f0;
    }
    .stTextInput>div>div>input,
    .stSelectbox>div>div>div>div {
        background-color: #1a1c23;
        color: #fff;
    }
    .stDataFrame .blank {
        background-color: #0f1117 !important;
    }
    </style>
""", unsafe_allow_html=True)
|
31 |
+
|
32 |
+
# Header banner rendered as raw HTML.
# NOTE(review): leading emoji restored from mojibake — 🏭 (factory) is the
# best-fit decode; confirm against the deployed app.
st.markdown("""
    <div style='text-align: center;'>
        <h1 style='color: #58a6ff;'>🏭 FactoryRAG+ Assistant</h1>
        <p style='color: #bbb;'>AI-Powered Digital Twin | Real-Time Sensor Health</p>
        <hr style='border-top: 2px solid #888;'>
    </div>
""", unsafe_allow_html=True)
|
40 |
|
41 |
# Model setup: a seq2seq generator for answers and a sentence-embedding
# encoder for retrieval. Both load eagerly at import time (no caching).
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
|
44 |
|
45 |
+
# File upload — the rest of the app is gated on this returning a file.
# NOTE(review): emoji restored from mojibake; 📁 is the best-fit decode.
uploaded_file = st.sidebar.file_uploader("📁 Upload your sensor CSV", type=["csv"])
|
|
|
47 |
|
48 |
if uploaded_file:
    # Load the sensor log; only numeric channels are used for chunking
    # and anomaly detection downstream.
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
    # NOTE(review): emoji restored from mojibake ("β…" decodes to ✅).
    st.success("✅ Sensor log loaded!")

    st.markdown("### 🧾 Sensor Log Preview")
    st.dataframe(df.head(), use_container_width=True)
|
|
|
55 |
|
|
|
56 |
def convert_to_chunks(df, cols=None):
    """Serialize each dataframe row into one retrievable text snippet.

    Args:
        df: Sensor-log dataframe.
        cols: Column names to include in each snippet. Defaults to the
            module-level ``numeric_cols`` derived from the uploaded file,
            preserving the original behavior.

    Returns:
        list[str]: one ``"[Log i] col: val, ..."`` line per row, values
        formatted to two decimal places.
    """
    if cols is None:
        cols = numeric_cols  # closure over the uploaded file's numeric columns
    return [
        f"[Log {i}] " + ", ".join(f"{col}: {row[col]:.2f}" for col in cols)
        for i, row in df.iterrows()
    ]
|
|
|
|
|
|
|
|
|
58 |
|
59 |
if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
|
60 |
chunks = convert_to_chunks(df)
|
|
|
62 |
st.session_state.chunks = chunks
|
63 |
st.session_state.embeddings = embeddings
|
64 |
|
65 |
+
# Condition status based on Isolation Forest: fit_predict labels each row
# -1 (anomaly) or 1 (normal); contamination=0.02 assumes ~2% anomalous logs.
# NOTE(review): status emojis restored from mojibake (❌/✅/🔧/🟢).
st.markdown("### ⚙️ Equipment Condition Status")
iso = IsolationForest(contamination=0.02)
labels = iso.fit_predict(df[numeric_cols])
df['status'] = ['❌ No Function' if x == -1 else '✅ Functional' for x in labels]
df['maintenance'] = ['🔧 Needs Maintenance' if x == -1 else '🟢 Stable' for x in labels]
st.dataframe(df[['status', 'maintenance'] + numeric_cols].head(), use_container_width=True)
|
72 |
|
73 |
+
# Role-based chatbot: each role maps to a system prompt that frames the
# generator's answer. NOTE(review): emojis restored from mojibake (💬/🏷).
st.markdown("### 💬 Real-Time Role-Based Chat Assistant")
roles = {
    "Operator": "You are a machine operator. Check if equipment is running properly. If not, flag it immediately.",
    "Maintenance": "You are a maintenance technician. Assess faulty logs and provide service insights.",
    "Engineer": "You are a systems engineer. Offer data-backed advice and failure diagnostics.",
}

role = st.selectbox("🏷 Choose your role", list(roles.keys()))
|
82 |
|
83 |
# Persist the running conversation across Streamlit reruns.
if 'chat_history' not in st.session_state:
    st.session_state.chat_history = []

# NOTE(review): emoji restored from mojibake ("π¨οΈ" decodes to 🗨️).
user_input = st.text_input("🗨️ Ask FactoryGPT about machine status or maintenance needs")
|
87 |
|
88 |
if user_input:
|
89 |
query_vec = EMBED_MODEL.encode([user_input])[0]
|
|
|
91 |
top_idxs = np.argsort(sims)[-3:][::-1]
|
92 |
context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
|
93 |
system_prompt = roles[role]
|
94 |
+
full_prompt = f"{system_prompt}\n\nSensor Log Context:\n{context}\n\nUser Question: {user_input}"
|
95 |
reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
|
96 |
|
97 |
st.session_state.chat_history.append((f"π€ You ({role})", user_input))
|
|
|
101 |
st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
|
102 |
|
103 |
else:
|
104 |
+
st.info("π Upload a CSV file with sensor logs to begin.")
|