sdafd committed on
Commit 63c4e31 · verified · 1 Parent(s): 42f8809

Create app.py

Files changed (1)
  1. app.py +78 -0
app.py ADDED
@@ -0,0 +1,78 @@
+ from fastapi import FastAPI, UploadFile, File, HTTPException
+ from tensorflow.keras.models import load_model
+ from tensorflow.keras.optimizers import Adam
+ from sklearn.preprocessing import MinMaxScaler
+ import numpy as np
+ import json
+ import tempfile
+ import os
+
+ app = FastAPI()
+
+ @app.post("/predict")
+ async def predict(model: UploadFile = File(...), data: str = None):
+     temp_model_path = None
+     try:
+         # Save the uploaded model to a temporary file
+         with tempfile.NamedTemporaryFile(delete=False, suffix=".h5") as temp_model_file:
+             temp_model_file.write(await model.read())
+             temp_model_path = temp_model_file.name
+
+         # Load the model
+         keras_model = load_model(temp_model_path, compile=False)
+
+         # Parse the JSON-encoded input sequence (safer than eval) and reshape to (1, 12, 1)
+         values = np.array(json.loads(data)).reshape(1, 12, 1)
+         predictions = keras_model.predict(values).tolist()
+
+         return {"predictions": predictions}
+
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=str(e))
+     finally:
+         # Clean up the temporary model file
+         if temp_model_path and os.path.exists(temp_model_path):
+             os.remove(temp_model_path)
+
+ @app.post("/retrain")
+ async def retrain(model: UploadFile = File(...), data: UploadFile = File(...)):
+     temp_model_path = None
+     temp_data_path = None
+     try:
+         # Save the uploaded model and data to temporary files
+         with tempfile.NamedTemporaryFile(delete=False, suffix=".h5") as temp_model_file:
+             temp_model_file.write(await model.read())
+             temp_model_path = temp_model_file.name
+
+         with tempfile.NamedTemporaryFile(delete=False, suffix=".npy") as temp_data_file:
+             temp_data_file.write(await data.read())
+             temp_data_path = temp_data_file.name
+
+         # Load the model and data
+         keras_model = load_model(temp_model_path, compile=False)
+         dataset = np.load(temp_data_path)
+
+         # Normalize the data
+         scaler = MinMaxScaler()
+         dataset_normalized = scaler.fit_transform(dataset)
+
+         # Build sliding windows of 12 timesteps for retraining
+         x_train = []
+         y_train = []
+         for i in range(12, len(dataset_normalized)):
+             x_train.append(dataset_normalized[i - 12:i, 0])
+             y_train.append(dataset_normalized[i, 0])
+
+         x_train = np.array(x_train).reshape(-1, 12, 1)
+         y_train = np.array(y_train)
+
+         # Retrain the model
+         keras_model.compile(optimizer=Adam(learning_rate=0.001), loss="mse", run_eagerly=True)
+         keras_model.fit(x_train, y_train, epochs=10, batch_size=32)
+
+         # Save the updated model alongside the uploaded one
+         updated_model_path = temp_model_path.replace(".h5", "_updated.h5")
+         keras_model.save(updated_model_path)
+
+         return {"message": "Model retrained successfully.", "updated_model_path": updated_model_path}
+
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=str(e))
+     finally:
+         # Clean up temporary files (paths may be unset if saving the uploads failed)
+         if temp_model_path and os.path.exists(temp_model_path):
+             os.remove(temp_model_path)
+         if temp_data_path and os.path.exists(temp_data_path):
+             os.remove(temp_data_path)
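
For quick local testing, a client sketch along these lines could exercise both endpoints. It assumes the service is running locally (for example via `uvicorn app:app --port 8000`), that `model.h5` is an existing Keras model saved in HDF5 format with input shape (12, 1), and that the file names and the example sequence are hypothetical:

import json
import numpy as np
import requests

BASE_URL = "http://127.0.0.1:8000"  # assumed local dev server

# Build a hypothetical single-feature dataset for /retrain; the endpoint
# expects a 2-D array, since it indexes column 0 after MinMax scaling.
series = np.arange(100, dtype=np.float32).reshape(-1, 1)
np.save("dataset.npy", series)

# /predict: the model is sent as a multipart file, the 12-step input window
# as a JSON-encoded "data" query parameter.
sequence = [float(v) for v in range(12)]  # hypothetical input window
with open("model.h5", "rb") as f:
    resp = requests.post(
        f"{BASE_URL}/predict",
        params={"data": json.dumps(sequence)},
        files={"model": ("model.h5", f, "application/octet-stream")},
    )
print(resp.json())

# /retrain: both the model and the .npy dataset are sent as multipart files.
with open("model.h5", "rb") as mf, open("dataset.npy", "rb") as df:
    resp = requests.post(
        f"{BASE_URL}/retrain",
        files={
            "model": ("model.h5", mf, "application/octet-stream"),
            "data": ("dataset.npy", df, "application/octet-stream"),
        },
    )
print(resp.json())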