DD8943 committed on
Commit a08f3f8 · verified · 1 Parent(s): f6d5234

Upload folder using huggingface_hub

Files changed (4):
  1. Dockerfile +13 -0
  2. app.py +51 -0
  3. final_model.joblib +3 -0
  4. requirements.txt +14 -0
Dockerfile ADDED
@@ -0,0 +1,13 @@
+ FROM python:3.9-slim
+
+ # Set the working directory inside the container
+ WORKDIR /app
+
+ # Copy all files from the current directory to the container's working directory
+ COPY . .
+
+ # Install dependencies from the requirements file without using cache to reduce image size
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+ # Define the command to start the application using Gunicorn with 4 worker processes
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:superkart_api"]
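
The CMD above tells Gunicorn to import the Flask object named superkart_api from app.py and bind it to port 7860, the port Docker-based Hugging Face Spaces route traffic to. For quick local testing without Docker or Gunicorn, a minimal sketch is shown below; run_local.py is a hypothetical helper, not part of this commit, and it assumes app.py and final_model.joblib sit in the current directory.

# run_local.py -- hypothetical helper, not part of this commit
# Serve the same Flask object that the Gunicorn CMD targets ("app:superkart_api"),
# using Flask's built-in development server instead of Gunicorn.
from app import superkart_api

if __name__ == "__main__":
    superkart_api.run(host="0.0.0.0", port=7860)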
app.py ADDED
@@ -0,0 +1,51 @@
+
+ # Import necessary libraries
+ import numpy as np
+ import joblib  # For loading the serialized model
+ import pandas as pd  # For data manipulation
+ from flask import Flask, request, jsonify  # For creating the Flask API
+
+ # Initialize Flask app with a name
+ superkart_api = Flask("superkart")
+
+ # Load the trained sales prediction model (copied into the working directory by the Dockerfile)
+ model = joblib.load("final_model.joblib")
+
+ # Define a route for the home page
+ @superkart_api.get('/')
+ def home():
+     return "Welcome to the SuperKart Sales Prediction API!"
+
+ # Define an endpoint to predict sales for a single product-store combination
+ @superkart_api.post('/v1/predict')
+ def predict_sales():
+     # Get JSON data from the request
+     data = request.get_json()
+
+     # Extract relevant features from the input data. The order of the column names matters.
+     sample = {
+         'Product_Weight': data['Product_Weight'],
+         'Product_Sugar_Content': data['Product_Sugar_Content'],
+         'Product_Allocated_Area': data['Product_Allocated_Area'],
+         'Product_MRP': data['Product_MRP'],
+         'Store_Size': data['Store_Size'],
+         'Store_Location_City_Type': data['Store_Location_City_Type'],
+         'Store_Type': data['Store_Type'],
+         'Product_Id_char': data['Product_Id_char'],
+         'Store_Age_Years': data['Store_Age_Years'],
+         'Product_Type_Category': data['Product_Type_Category']
+     }
+
+     # Convert the extracted data into a DataFrame
+     input_data = pd.DataFrame([sample])
+
+     # Make a sales prediction using the trained model
+     prediction = model.predict(input_data).tolist()[0]
+
+     # Return the prediction as a JSON response
+     return jsonify({'Sales': prediction})
+
+
+ # Run the Flask app in debug mode when executed directly
+ if __name__ == '__main__':
+     superkart_api.run(debug=True)
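
Once the server is running on port 7860, the /v1/predict endpoint expects a JSON body containing the ten feature keys read in predict_sales(). A minimal client sketch follows; the field values are illustrative placeholders, not values taken from the SuperKart data.

# client_example.py -- hypothetical client, not part of this commit
import requests

# Illustrative payload; the keys match the features extracted in predict_sales(),
# but the values are placeholders.
payload = {
    "Product_Weight": 12.5,
    "Product_Sugar_Content": "Low Sugar",
    "Product_Allocated_Area": 0.05,
    "Product_MRP": 150.0,
    "Store_Size": "Medium",
    "Store_Location_City_Type": "Tier 2",
    "Store_Type": "Supermarket Type1",
    "Product_Id_char": "FD",
    "Store_Age_Years": 15,
    "Product_Type_Category": "Food",
}

response = requests.post("http://localhost:7860/v1/predict", json=payload)
print(response.json())  # e.g. {"Sales": <predicted value>}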
final_model.joblib ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5112d9266cf15406ca8b854cf4da49282159d2c5447e153564bec23223c88370
+ size 184758
requirements.txt ADDED
@@ -0,0 +1,14 @@
+ pandas==2.2.2
+ numpy==2.0.2
+ scikit-learn==1.6.1
+ seaborn==0.13.2
+ xgboost==2.1.4
+ joblib==1.4.2
+ Werkzeug==2.2.2
+ flask==2.2.2
+ gunicorn==20.1.0
+ requests==2.32.3
+ uvicorn[standard]
+ FuzzyTM>=0.4.0
+ matplotlib==3.8.4
+ protobuf>=5.0.0