Update app.py
app.py CHANGED
@@ -1,67 +1,122 @@
 import streamlit as st
 import pandas as pd
 import numpy as np
 from sklearn.linear_model import LinearRegression
 from sklearn.preprocessing import PolynomialFeatures
-
-
-def load_data(file):
-    return pd.read_csv(file)
-
-def plot_data(data, x_values, y_values, model=None, prediction=None):
-    plt.scatter(x_values, y_values, label='Data')
-    if model is not None and prediction is not None:
-        plt.plot(x_values, prediction, color='red', label='Model')
-    plt.xlabel('Index')
-    plt.ylabel('Value')
-    plt.legend()
-    plt.show()
-
-def fit_model(data, model_type, x_values, y_values):
-    if model_type == 'Linear Regression':
-        model = LinearRegression()
-        x_values = x_values.reshape(-1, 1)
-        model.fit(x_values, y_values)
-        prediction = model.predict(x_values)
-        equation = f'y = {model.coef_[0]:.4f}x + {model.intercept_:.4f}'
-    elif model_type == 'Polynomial Regression':
-        polynomial_features = PolynomialFeatures(degree=2)
-        x_values_poly = polynomial_features.fit_transform(x_values.reshape(-1, 1))
-        model = LinearRegression()
-        model.fit(x_values_poly, y_values)
-        prediction = model.predict(x_values_poly)
-        equation = 'Polynomial equation (degree 2)'
-    else:
-        model = None
-        prediction = None
-        equation = "No model selected"
-    return model, prediction, equation
-
-def app():
-    st.title('RGB and HSV Analysis and Prediction')
-    uploaded_file = st.file_uploader("Choose a CSV file", type="csv")
-
-    if uploaded_file is not None:
-        data = load_data(uploaded_file)
-        st.dataframe(data.head())
-
-        # Selecting R, G, B, H, S, V
-        color_component = st.selectbox("Select color component", ['R', 'G', 'B', 'H', 'S', 'V'])
-        st.write(f"Selected component: {color_component}")
-        selected_data = data[color_component].values
-
-        # Selecting regression model
-        regression_model = st.selectbox("Select a regression model", ["None", "Linear Regression", "Polynomial Regression"])
-
-        x_values = np.arange(len(selected_data))
-        y_values = selected_data
-
-        # Fitting the selected model
-        model, prediction, equation = fit_model(data, regression_model, x_values, y_values)
-        st.write(f"Equation: {equation}")
-
-        # Plotting the data and model
-        plot_data(data, x_values, y_values, model, prediction)
-
-# Uncomment the next line to run the app locally
-# app()
+
 import streamlit as st
 import pandas as pd
+import matplotlib.pyplot as plt
 import numpy as np
 from sklearn.linear_model import LinearRegression
 from sklearn.preprocessing import PolynomialFeatures
+from sklearn.pipeline import make_pipeline
+from sklearn.svm import SVR
+from sklearn.ensemble import RandomForestRegressor
+
+st.title("Webcam Color Detection Charting")
+
+uploaded_file = st.file_uploader("Choose a CSV file", type="csv")
+
+time_frame_options = [
+    "All",
+    "1 second",
+    "5 seconds",
+    "10 seconds",
+    "30 seconds",
+    "1 minute",
+    "5 minutes",
+    "10 minutes",
+    "30 minutes",
+    "60 minutes",
+]
+time_frame = st.selectbox("Data Time Frame", time_frame_options)
+
+regression_options = [
+    "None",
+    "Linear Regression",
+    "Polynomial Regression",
+    "SVR (Support Vector Regression)",
+    "Random Forest Regression",
+]
+regression_type = st.selectbox("Regression Analysis Type", regression_options)
+
+if uploaded_file is not None:
+    # Read CSV file
+    data = pd.read_csv(uploaded_file)
+
+    # Filter data according to the time frame
+    if time_frame != "All":
+        seconds = {
+            "1 second": 1,
+            "5 seconds": 5,
+            "10 seconds": 10,
+            "30 seconds": 30,
+            "1 minute": 60,
+            "5 minutes": 300,
+            "10 minutes": 600,
+            "30 minutes": 1800,
+            "60 minutes": 3600,
+        }
+        data['timestamp'] = pd.to_datetime(data['timestamp'], unit='ms')
+        data.set_index('timestamp', inplace=True)
+        data = data.resample(f"{seconds[time_frame]}S").mean().dropna().reset_index()
+
+    # Create charts
+    fig, axes = plt.subplots(2, 1, figsize=(10, 8))
+
+    # RGB chart
+
+    color_space_options = ["RGB", "HSV"]
+    color_space = st.selectbox("Select Color Space", color_space_options)
+
+    # Depending on the selection, plot the corresponding values
+    if color_space == "RGB":
+        axes[0].plot(data['R'], 'r', label='R')
+        axes[0].plot(data['G'], 'g', label='G')
+        axes[0].plot(data['B'], 'b', label='B')
+    elif color_space == "HSV":
+        axes[0].plot(data['H'], 'r', label='H')
+        axes[0].plot(data['S'], 'g', label='S')
+        axes[0].plot(data['V'], 'b', label='V')
+    axes[0].legend(loc='upper right')
+
+
+    axes[0].legend(loc='upper right')
+    axes[0].set_title('RGB Values')
+    axes[1].legend(loc='upper right')
+    axes[1].set_title('HSV Values')
+
+    # Perform regression analysis if selected
+    if regression_type != "None":
+        X = np.arange(len(data)).reshape(-1, 1)
+
+        # Linear Regression
+        if regression_type == "Linear Regression":
+            model = LinearRegression()
+            for color, code in zip(['R', 'G', 'B'], ['r', 'g', 'b']):
+                model.fit(X, data[color])
+                axes[0].plot(X, model.predict(X), f'{code}--')
+                st.write(f"{color}: y = {model.coef_[0]} * x + {model.intercept_}")
+
+        # Polynomial Regression
+        elif regression_type == "Polynomial Regression":
+            polynomial_features = PolynomialFeatures(degree=2)
+            model = make_pipeline(polynomial_features, LinearRegression())
+            for color, code in zip(['R', 'G', 'B'], ['r', 'g', 'b']):
+                model.fit(X, data[color])
+                axes[0].plot(X, model.predict(X), f'{code}--')
+            st.write("Polynomial regression equation is not easily representable.")
+
+        # SVR (Support Vector Regression)
+        elif regression_type == "SVR (Support Vector Regression)":
+            model = SVR()
+            for color, code in zip(['R', 'G', 'B'], ['r', 'g', 'b']):
+                model.fit(X, data[color])
+                axes[0].plot(X, model.predict(X), f'{code}--')
+            st.write("SVR equation is not easily representable.")
+
+        # Random Forest Regression
+        elif regression_type == "Random Forest Regression":
+            model = RandomForestRegressor()
+            for color, code in zip(['R', 'G', 'B'], ['r', 'g', 'b']):
+                model.fit(X, data[color])
+                axes[0].plot(X, model.predict(X), f'{code}--')
+            st.write("Random Forest equation is not easily representable.")

+    st.pyplot(fig)
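
For reference: the updated script assumes the uploaded CSV has a timestamp column in epoch milliseconds plus the color columns R, G, B, H, S, V, since those names are hard-coded above, and the app itself is launched with streamlit run app.py. The following minimal sketch is not part of the commit; it uses purely illustrative values to show what the time-frame resampling step does to such a file when a 5-second window is selected.

import numpy as np
import pandas as pd

# Illustrative data only: 20 samples, one every 500 ms, with random color values.
n = 20
df = pd.DataFrame({
    "timestamp": np.arange(n) * 500,  # epoch time offsets in milliseconds
    "R": np.random.randint(0, 256, n),
    "G": np.random.randint(0, 256, n),
    "B": np.random.randint(0, 256, n),
    "H": np.random.randint(0, 180, n),
    "S": np.random.randint(0, 256, n),
    "V": np.random.randint(0, 256, n),
})

# Same steps as the app: parse milliseconds, index by time, average per 5-second bin.
df["timestamp"] = pd.to_datetime(df["timestamp"], unit="ms")
df = df.set_index("timestamp").resample("5S").mean().dropna().reset_index()
print(df)  # two rows: one mean per 5-second window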
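
The Linear Regression branch above fits each channel against the sample index and reports the fitted line as y = coef * x + intercept. A self-contained sketch of that single step, with synthetic channel values standing in for data['R'] / data['G'] / data['B']:

import numpy as np
from sklearn.linear_model import LinearRegression

# Illustrative channel values; in the app these come from the uploaded CSV.
y = np.array([10.0, 12.0, 15.0, 14.0, 18.0, 21.0])
X = np.arange(len(y)).reshape(-1, 1)  # sample index as the single feature

model = LinearRegression().fit(X, y)
# The slope indicates how fast the channel drifts per sample.
print(f"y = {model.coef_[0]:.3f} * x + {model.intercept_:.3f}")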