Update app.py
app.py CHANGED
@@ -17,15 +17,21 @@ async def predict(model: UploadFile = File(...), data: str = None):
             temp_model_file.write(await model.read())
             temp_model_path = temp_model_file.name
 
+        ds = eval(data)
+        ds = np.array(ds).reshape(-1, 1)
+        # Normalize the data
+        scaler = MinMaxScaler()
+        ds_normalized = scaler.fit_transform(ds)
+
         # Load the model
         model = load_model(temp_model_path, compile=False)
         model.compile(optimizer=Adam(learning_rate=0.001), loss='mse', run_eagerly=True)
         print(data)
         # Process the data
-
-
+        predictions = model.predict(ds_normalized.reshape(1, 12, 1)).tolist()
+        predictions_rescaled = scaler.inverse_transform(predictions.reshape(-1, 1)).flatten()
 
-        return {"predictions":
+        return {"predictions": predictions_rescaled}
 
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e))
@@ -37,29 +43,21 @@ async def retrain(model: UploadFile = File(...), data: str = None):
         with tempfile.NamedTemporaryFile(delete=False, suffix=".h5") as temp_model_file:
             temp_model_file.write(await model.read())
             temp_model_path = temp_model_file.name
-        print(data)
 
 
         # Load the model and data
         model = load_model(temp_model_path, compile=False)
         model.compile(optimizer=Adam(learning_rate=0.001), loss='mse', run_eagerly=True)
 
-
-
+        ds = eval(data)
+        ds = np.array(ds).reshape(-1, 1)
         # Normalize the data
         scaler = MinMaxScaler()
-        dataset_normalized = scaler.fit_transform(
-
-        # Retrain the model
-        x_train = []
-        y_train = []
-        for i in range(12, len(dataset_normalized)):
-            x_train.append(dataset_normalized[i-12:i, 0])
-            y_train.append(dataset_normalized[i, 0])
-
-        x_train = np.array(x_train).reshape(-1, 12, 1)
-        y_train = np.array(y_train)
+        dataset_normalized = scaler.fit_transform(ds)
 
+        x_train = np.array([ds_normalized[i - 12:i] for i in range(12, len(ds_normalized))])
+        y_train = ds_normalized[12:]
+
         model.compile(optimizer=Adam(learning_rate=0.001), loss="mse", run_eagerly=True)
         model.fit(x_train, y_train, epochs=1, batch_size=32)
 
|
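For orientation, here is a minimal sketch of how the updated predict handler reads once the added lines are applied. The imports, route decorator, and try/except wrapper are assumptions inferred from the context lines rather than shown in the diff, and the sketch departs from the commit in one place: the added lines call .reshape on the Python list returned by .tolist(), which would raise AttributeError, so the sketch keeps the prediction as a NumPy array and converts to a plain list only when building the JSON response.

import tempfile

import numpy as np
from fastapi import FastAPI, File, HTTPException, UploadFile
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.models import load_model
from tensorflow.keras.optimizers import Adam

app = FastAPI()

@app.post("/predict")  # hypothetical route path; the diff only shows the handler body
async def predict(model: UploadFile = File(...), data: str = None):
    try:
        # Persist the uploaded .h5 model to a temporary file
        with tempfile.NamedTemporaryFile(delete=False, suffix=".h5") as temp_model_file:
            temp_model_file.write(await model.read())
            temp_model_path = temp_model_file.name

        # Parse the incoming series and scale it to [0, 1]
        ds = np.array(eval(data)).reshape(-1, 1)  # eval() mirrors the commit; json.loads would be safer
        scaler = MinMaxScaler()
        ds_normalized = scaler.fit_transform(ds)

        # Load and compile the model
        model = load_model(temp_model_path, compile=False)
        model.compile(optimizer=Adam(learning_rate=0.001), loss='mse', run_eagerly=True)

        # Predict on a single window of 12 timesteps, then undo the scaling
        predictions = model.predict(ds_normalized.reshape(1, 12, 1))
        predictions_rescaled = scaler.inverse_transform(predictions.reshape(-1, 1)).flatten()

        return {"predictions": predictions_rescaled.tolist()}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))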
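The retrain hunk swaps the explicit append loop for a single list-comprehension windowing step. Note that the added lines store the scaled series in dataset_normalized but then index ds_normalized, so one of the two names is presumably a typo; with a consistent name the windowing behaves as sketched below on a toy 15-point series, assuming the model expects inputs of shape (samples, 12, 1).

import numpy as np
from sklearn.preprocessing import MinMaxScaler

series = np.arange(15, dtype=float).reshape(-1, 1)  # toy stand-in for np.array(eval(data)).reshape(-1, 1)
scaler = MinMaxScaler()
ds_normalized = scaler.fit_transform(series)        # shape (15, 1), values scaled to [0, 1]

# Each sample is a window of the previous 12 scaled values; the target is the value that follows.
x_train = np.array([ds_normalized[i - 12:i] for i in range(12, len(ds_normalized))])
y_train = ds_normalized[12:]

print(x_train.shape)  # (3, 12, 1) -- same layout the old code built with reshape(-1, 12, 1)
print(y_train.shape)  # (3, 1)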
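Finally, a hedged example of how a client might exercise the predict endpoint. The route path, host, and port are assumptions (the diff shows only the handler bodies), and data is assumed to be a stringified list of 12 observations, since the handler reshapes the scaled input to (1, 12, 1).

import requests

values = [112, 118, 132, 129, 121, 135, 148, 148, 136, 119, 104, 118]  # 12 recent observations

with open("model.h5", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/predict",  # assumed URL; adjust to the Space's actual endpoint
        files={"model": ("model.h5", f, "application/octet-stream")},
        params={"data": str(values)},     # data: str = None is read as a query parameter by FastAPI
    )

print(resp.json())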