bhagwandas committed (verified)
Commit 7fdcd7c · Parent: 0cd7d19

Update app.py

Files changed (1)
  1. app.py +48 -33
app.py CHANGED
@@ -1,4 +1,4 @@
-# app.py - FactoryRAG+: Fancy Lite Version with Animation & Chatbot Only
+# app.py - FactoryRAG+: AI Assistant with Real-Time Functional Status by Role
 
 import streamlit as st
 import pandas as pd
@@ -7,41 +7,54 @@ from sentence_transformers import SentenceTransformer
 from transformers import pipeline
 from sklearn.ensemble import IsolationForest
 
-# Page config with emoji + layout
+# Page config
 st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="🧠", layout="wide")
 
-# Animated header
+# Custom dark theme styling
 st.markdown("""
-<h1 style='text-align: center; color: #3498db; font-size: 48px;'>
-🏭 FactoryRAG+ <span style="font-size: 28px;">| AI Assistant for Smart Sensors</span>
-</h1>
-<hr style='border-top: 2px solid #bbb;' />
+<style>
+html, body, [class*="css"] {
+    font-family: 'Segoe UI', sans-serif;
+    background-color: #0f1117;
+    color: #f0f0f0;
+}
+.stTextInput>div>div>input,
+.stSelectbox>div>div>div>div {
+    background-color: #1a1c23;
+    color: #fff;
+}
+.stDataFrame .blank {
+    background-color: #0f1117 !important;
+}
+</style>
+""", unsafe_allow_html=True)
+
+# Header
+st.markdown("""
+<div style='text-align: center;'>
+    <h1 style='color: #58a6ff;'>🏭 FactoryRAG+ Assistant</h1>
+    <p style='color: #bbb;'>AI-Powered Digital Twin | Real-Time Sensor Health</p>
+    <hr style='border-top: 2px solid #888;'>
+</div>
 """, unsafe_allow_html=True)
 
 # Load models
 EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
 GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
 
-# Sidebar upload
-st.sidebar.markdown("### 📂 Upload Sensor Log")
-uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])
+# File upload
+uploaded_file = st.sidebar.file_uploader("📂 Upload your sensor CSV", type=["csv"])
 
 if uploaded_file:
     df = pd.read_csv(uploaded_file)
     numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
-    st.success("✅ Data uploaded successfully!")
+    st.success("✅ Sensor log loaded!")
 
-    # Animated section
-    st.markdown("### 🔍 Sensor Log Preview")
-    st.dataframe(df.head())
+    st.markdown("### 🧾 Sensor Log Preview")
+    st.dataframe(df.head(), use_container_width=True)
 
-    # Convert to text chunks
     def convert_to_chunks(df):
-        chunks = []
-        for idx, row in df.iterrows():
-            log_text = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
-            chunks.append(log_text)
-        return chunks
+        return [f"[Log {i}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols]) for i, row in df.iterrows()]
 
     if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
         chunks = convert_to_chunks(df)
@@ -49,26 +62,28 @@ if uploaded_file:
         st.session_state.chunks = chunks
         st.session_state.embeddings = embeddings
 
-    # --- Anomaly Detection ---
-    st.markdown("### 🚨 Real-Time Anomaly Scanner")
+    # Condition status based on Isolation Forest
+    st.markdown("### ⚙️ Equipment Condition Status")
     iso = IsolationForest(contamination=0.02)
     labels = iso.fit_predict(df[numeric_cols])
-    df['anomaly'] = ['❌ Anomaly' if x == -1 else '✅ Normal' for x in labels]
-    st.dataframe(df[df['anomaly'].str.contains("❌")].head())
+    df['status'] = ['❌ No Function' if x == -1 else '✅ Functional' for x in labels]
+    df['maintenance'] = ['🔧 Needs Maintenance' if x == -1 else '🟢 Stable' for x in labels]
+    st.dataframe(df[['status', 'maintenance'] + numeric_cols].head(), use_container_width=True)
 
-    # --- Chatbot Assistant ---
-    st.markdown("### 💬 Ask FactoryGPT")
+    # Role-based chatbot
+    st.markdown("### 💬 Real-Time Role-Based Chat Assistant")
     roles = {
-        "Operator": "You are a machine operator. Provide practical insights and safety warnings.",
-        "Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
-        "Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
+        "Operator": "You are a machine operator. Check if equipment is running properly. If not, flag it immediately.",
+        "Maintenance": "You are a maintenance technician. Assess faulty logs and provide service insights.",
+        "Engineer": "You are a systems engineer. Offer data-backed advice and failure diagnostics."
    }
-    role = st.selectbox("👷 Select your role", list(roles.keys()))
+
+    role = st.selectbox("👷 Choose your role", list(roles.keys()))
 
     if 'chat_history' not in st.session_state:
         st.session_state.chat_history = []
 
-    user_input = st.text_input("🗨️ Ask about the sensor log...", key="chat_input")
+    user_input = st.text_input("🗨️ Ask FactoryGPT about machine status or maintenance needs")
 
     if user_input:
         query_vec = EMBED_MODEL.encode([user_input])[0]
@@ -76,7 +91,7 @@ if uploaded_file:
         top_idxs = np.argsort(sims)[-3:][::-1]
         context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
         system_prompt = roles[role]
-        full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
+        full_prompt = f"{system_prompt}\n\nSensor Log Context:\n{context}\n\nUser Question: {user_input}"
         reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
 
         st.session_state.chat_history.append((f"👤 You ({role})", user_input))
@@ -86,4 +101,4 @@ if uploaded_file:
         st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
 
 else:
-    st.info("👈 Please upload a sensor CSV file to begin.")
+    st.info("👈 Upload a CSV file with sensor logs to begin.")
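
The unchanged lines that fall between the hunks (new lines 61 and 90) are not shown in the diff, yet the surrounding context relies on `embeddings` and `sims` being computed there, presumably by encoding the chunks once and scoring the query against them. Below is a minimal sketch of that retrieval step, assuming plain cosine similarity over the cached chunk embeddings; the helper name `retrieve_top_chunks` and the use of `sklearn.metrics.pairwise.cosine_similarity` are illustrative assumptions, not taken from this commit.

```python
# Sketch of the retrieval step the diff's context lines rely on.
# Assumption: `embeddings` is built once from the chunk texts and `sims`
# is a vector of cosine similarities between the query and those embeddings.
import numpy as np
from sentence_transformers import SentenceTransformer
from sklearn.metrics.pairwise import cosine_similarity

EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')

def retrieve_top_chunks(query, chunks, embeddings, k=3):
    """Return the k chunks most similar to the query (hypothetical helper)."""
    query_vec = EMBED_MODEL.encode([query])[0]
    sims = cosine_similarity([query_vec], embeddings)[0]  # shape: (len(chunks),)
    top_idxs = np.argsort(sims)[-k:][::-1]                # highest similarity first
    return [chunks[i] for i in top_idxs]

# Usage mirroring the app: embeddings are cached once, then queried per question.
chunks = ["[Log 0] temp: 71.20, vibration: 0.03",
          "[Log 1] temp: 95.10, vibration: 0.92"]
embeddings = EMBED_MODEL.encode(chunks)
print(retrieve_top_chunks("Which log looks abnormal?", chunks, embeddings, k=1))
```

Caching `chunks` and `embeddings` in `st.session_state`, as the app does, keeps the log from being re-encoded on every Streamlit rerun; only the short user query is embedded per question.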