Update 3_SimpleRegression.py
3_SimpleRegression.py (CHANGED: +7 -10)
@@ -8,16 +8,15 @@ from matplotlib import pyplot as plt
 def build_model(my_learning_rate):
     model = tf.keras.models.Sequential()
     model.add(tf.keras.layers.Dense(units=1, input_shape=(1,)))
-    model.compile(optimizer=tf.keras.optimizers.RMSprop(
+    model.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=my_learning_rate),
                   loss='mean_squared_error',
                   metrics=[tf.keras.metrics.RootMeanSquaredError()])
     return model
 
 # Function to train the model
 def train_model(model, feature, label, epochs, batch_size):
-    history = model.fit(x=feature, y=label, batch_size=batch_size,
-
-    trained_weight = model.get_weights()[0]
+    history = model.fit(x=feature, y=label, batch_size=batch_size, epochs=epochs)
+    trained_weight = model.get_weights()[0][0]
     trained_bias = model.get_weights()[1]
     epochs = history.epoch
     hist = pd.DataFrame(history.history)
@@ -40,7 +39,7 @@ def plot_the_model(trained_weight, trained_bias, feature, label):
     y1 = trained_bias + (trained_weight * x1)
     plt.plot([x0, x1], [y0, y1], c='r')
 
-
+    st.pyplot(plt)
 
 # Function to plot the loss curve
 def plot_the_loss_curve(epochs, rmse):
@@ -51,11 +50,11 @@ def plot_the_loss_curve(epochs, rmse):
     plt.plot(epochs, rmse, label='Loss')
     plt.legend()
     plt.ylim([rmse.min()*0.97, rmse.max()])
-
+    st.pyplot(plt)
 
 # Define the dataset
-my_feature = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0])
-my_label = np.array([5.0, 8.8, 9.6, 14.2, 18.8, 19.5, 21.4, 26.8, 28.9, 32.0, 33.8, 38.2])
+my_feature = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0], dtype=float).reshape(-1, 1)
+my_label = np.array([5.0, 8.8, 9.6, 14.2, 18.8, 19.5, 21.4, 26.8, 28.9, 32.0, 33.8, 38.2], dtype=float).reshape(-1, 1)
 
 # Streamlit interface
 st.title("Simple Linear Regression with Synthetic Data")
@@ -70,8 +69,6 @@ if st.sidebar.button('Run'):
 
     st.subheader('Model Plot')
     plot_the_model(trained_weight, trained_bias, my_feature, my_label)
-    st.pyplot(plt)
 
     st.subheader('Loss Curve')
     plot_the_loss_curve(epochs, rmse)
-    st.pyplot(plt)
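
For reference, below is a minimal standalone sketch of how the updated pieces fit together after this commit. Only the lines visible in the diff are taken from the Space itself; the imports, the figure and axis-label setup inside plot_the_loss_curve, the rmse extraction and return value of train_model, and the sidebar widgets (learning_rate, n_epochs, my_batch_size) are assumptions added to make the sketch runnable. plot_the_model is left out because most of its body falls outside the diff.

# Sketch only: assumes tensorflow, streamlit, numpy, pandas and matplotlib are installed.
import numpy as np
import pandas as pd
import tensorflow as tf
import streamlit as st
from matplotlib import pyplot as plt

def build_model(my_learning_rate):
    model = tf.keras.models.Sequential()
    model.add(tf.keras.layers.Dense(units=1, input_shape=(1,)))
    # Fixed in this commit: the learning rate is now actually passed to RMSprop.
    model.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=my_learning_rate),
                  loss='mean_squared_error',
                  metrics=[tf.keras.metrics.RootMeanSquaredError()])
    return model

def train_model(model, feature, label, epochs, batch_size):
    # Fixed in this commit: epochs is forwarded to fit(), and the kernel is
    # indexed [0][0] so the slope is the first row of the (1, 1) weight matrix.
    history = model.fit(x=feature, y=label, batch_size=batch_size, epochs=epochs)
    trained_weight = model.get_weights()[0][0]
    trained_bias = model.get_weights()[1]
    hist = pd.DataFrame(history.history)
    # 'root_mean_squared_error' is the default name Keras records for this metric.
    rmse = hist['root_mean_squared_error']
    return trained_weight, trained_bias, history.epoch, rmse  # assumed return shape

def plot_the_loss_curve(epochs, rmse):
    # Figure/axis setup is assumed; only the last four lines appear in the diff.
    plt.figure()
    plt.xlabel('Epoch')
    plt.ylabel('Root Mean Squared Error')
    plt.plot(epochs, rmse, label='Loss')
    plt.legend()
    plt.ylim([rmse.min() * 0.97, rmse.max()])
    st.pyplot(plt)  # rendering now happens inside the helper, per this commit

# Dataset as reshaped by this commit: explicit float columns of shape (12, 1).
my_feature = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0], dtype=float).reshape(-1, 1)
my_label = np.array([5.0, 8.8, 9.6, 14.2, 18.8, 19.5, 21.4, 26.8, 28.9, 32.0, 33.8, 38.2], dtype=float).reshape(-1, 1)

st.title("Simple Linear Regression with Synthetic Data")

# Hypothetical sidebar controls; the real Space's input widgets are not shown in the diff.
learning_rate = st.sidebar.number_input('Learning rate', value=0.01)
n_epochs = st.sidebar.number_input('Epochs', value=50)
my_batch_size = st.sidebar.number_input('Batch size', value=12)

if st.sidebar.button('Run'):
    model = build_model(learning_rate)
    trained_weight, trained_bias, epochs, rmse = train_model(
        model, my_feature, my_label, int(n_epochs), int(my_batch_size))
    st.subheader('Loss Curve')
    plot_the_loss_curve(epochs, rmse)

Moving st.pyplot(plt) into the plotting helpers is what lets the caller block at the bottom of the script drop its own st.pyplot calls, which is why the final hunk shrinks from 8 lines to 6.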