import streamlit as st
import json
import pandas as pd
from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
import plotly.express as px
import plotly.graph_objects as go
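
# Pre-set API token kept in session state and passed to call_api() below.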
if 'api_token' not in st.session_state:
    st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
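
# Drop any stale upload and response state at the start of each script run.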
for key in ['current_file', 'json_data', 'api_response']:
    if key in st.session_state:
        del st.session_state[key]
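
# Re-initialise the keys so the checks further down can rely on them existing.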
if 'current_file' not in st.session_state:
    st.session_state.current_file = None
if 'json_data' not in st.session_state:
    st.session_state.json_data = None
if 'api_response' not in st.session_state:
    st.session_state.api_response = None

st.title("Energy Production Anomaly Detection")

st.markdown("""
This service analyzes energy consumption patterns to detect anomalies and unusual behavior in your data.

### Features
- Real-time anomaly detection
- Consumption irregularity identification
- Interactive visualization of detected anomalies
""")
uploaded_file = st.file_uploader("Upload JSON file", type=['json'])

if uploaded_file:
    try:
        # Keep both the raw bytes and the parsed JSON in session state for later reuse.
        file_contents = uploaded_file.read()
        st.session_state.current_file = file_contents
        st.session_state.json_data = json.loads(file_contents)
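
        # load_and_process_data() returns one DataFrame per unit of measurement.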
        dfs = load_and_process_data(st.session_state.json_data)
        if dfs:
            st.header("Input Data Analysis")
            tabs = st.tabs(["Visualization", "Statistics", "Raw Data"])
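
            # Visualization tab: a time-series plot plus summary metrics for each unit.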
            with tabs[0]:
                for unit, df in dfs.items():
                    st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)

                    col1, col2, col3 = st.columns(3)
                    with col1:
                        st.metric("Average Consumption",
                                  f"{df['datacellar:value'].mean():.2f} {unit}")
                    with col2:
                        st.metric("Standard Deviation",
                                  f"{df['datacellar:value'].std():.2f} {unit}")
                    with col3:
                        st.metric("Total Samples",
                                  len(df))

            with tabs[1]:
                display_statistics(dfs)

            with tabs[2]:
                st.json(st.session_state.json_data)
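
        # Controls for running the anomaly-detection service and clearing its results.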
        st.subheader("Anomaly Detection")
        col1, col2 = st.columns(2)
        with col1:
            if st.button("Detect Anomalies", key="detect_button"):
                if not st.session_state.api_token:
                    st.error("Please enter your API token in the sidebar first.")
                else:
                    with st.spinner("Analyzing consumption patterns..."):
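                        # Serialize the uploaded JSON and send it to the production anomaly-detection endpoint.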
                        modified_data = st.session_state.json_data.copy()
                        modified_content = json.dumps(modified_data).encode('utf-8')
                        st.session_state.api_response = call_api(
                            modified_content,
                            st.session_state.api_token,
                            "inference_production_ad"
                        )
        with col2:
            if st.button("Clear Results", key="clear_button"):
                st.session_state.api_response = None
                st.experimental_rerun()

    except Exception as e:
        st.error(f"Error processing file: {str(e)}")

if st.session_state.api_response:
    st.header("Anomaly Detection Results")
    tabs = st.tabs(["Anomaly Visualization", "Raw Results"])
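
    # Parse the API response into DataFrames, using the uploaded input data as a reference.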
    with tabs[0]:
        response_dfs = load_and_process_data(
            st.session_state.api_response,
            input_data=st.session_state.json_data
        )
        if response_dfs:
            # The 'boolean' frame flags anomalous samples; keep only the rows marked True.
            anomalies = response_dfs['boolean']
            anomalies = anomalies[anomalies['datacellar:value'] == True]
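
            # Drop the flag frame and plot each remaining series with its anomalies highlighted.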
            del response_dfs['boolean']
            for unit, df in response_dfs.items():
                fig = create_time_series_plot(df, unit, service_type="Anomaly Detection")

                # Select the samples flagged as anomalous and overlay them as red markers.
                anomaly_df = df.iloc[anomalies['datacellar:timeStamp'].index]
                fig.add_trace(go.Scatter(
                    x=anomaly_df['datacellar:timeStamp'],
                    y=anomaly_df['datacellar:value'],
                    mode='markers',
                    marker=dict(color='red'),
                    name='Anomalies'
                ))

                st.plotly_chart(
                    fig,
                    use_container_width=True
                )

    with tabs[1]:
        st.json(st.session_state.api_response)