import os
import json
import numpy as np
import pandas as pd
import onnxruntime as ort
from numpy import expand_dims
from decimal import Decimal, ROUND_DOWN
from pandas import read_csv, to_datetime

class Utilities:
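    """Shared helpers for the ONNX price-prediction flow: model, dataset, and
    scaler paths, decimal truncation, and min-max de-normalization."""
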
    def __init__(self) -> None:
        self.model_path = './models'
        self.posttrained_path = './indonesia_stocks/modeling_datas'
        self.scaler_path = './indonesia_stocks/min_max'

    def truncate_2_decimal(self, val: float):
        # NOTE: despite the name, this truncates to three decimal places
        # (quantize step '0.001'), always rounding toward zero.
        try:
            return float(Decimal(str(float(val))).quantize(Decimal('0.001'), rounding=ROUND_DOWN))
        except Exception as e:
            print("Decimal error:", e)
            return float(val)

    def denormalization(self, data, min_value, max_value):
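        """Invert min-max scaling: data * (max_value - min_value) + min_value."""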
        return (data * (max_value - min_value)) + min_value

    async def cryptocurrency_prediction_utils(self,
        days: int, sequence_length: int, model_name: str) -> tuple:
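        """Run `{model_name}.onnx` autoregressively for `days` steps over the last
        `sequence_length` rows of its dataset and return (actuals, predictions),
        each a list of {'date': 'YYYY-MM-DD', 'price': float} dicts."""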

        model_path = os.path.join(self.model_path, f'{model_name}.onnx')
        try:
            session = ort.InferenceSession(model_path)
        except Exception as e:
            print("ONNX model load error:", e)
            return [], []
        input_name = session.get_inputs()[0].name

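        # Post-training dataset for this model, indexed by Date.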
        dataframe_path = os.path.join(self.posttrained_path, f'{model_name}.csv')
        dataframe = read_csv(dataframe_path, index_col='Date', parse_dates=True)

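        # Min/max values saved during normalization, used to de-normalize Close prices.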
        scaler_path = os.path.join(self.scaler_path, f'{model_name}.json')
        with open(scaler_path, 'r') as f:
            scalers = json.load(f)

        min_close = scalers['min_value']['Close']
        max_close = scalers['max_value']['Close']

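        # Seed the sliding window with the last `sequence_length` rows,
        # shaped (1, sequence_length, n_features) for the ONNX session.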
        lst_seq = dataframe[-sequence_length:].values
        lst_seq = expand_dims(lst_seq, axis=0)

        predicted_prices = {}
        last_date = to_datetime(dataframe.index[-1])

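        # Autoregressive forecast: predict one step, de-normalize it, advance the
        # date, then slide the window forward with the new normalized value.
        # NaN outputs are skipped defensively.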
        for _ in range(days):
            predicted = session.run(None, {input_name: lst_seq.astype(np.float32)})[0]
            value = np.array(predicted).flatten()[0]
            if np.isnan(value):
                continue
            denorm_price = self.denormalization(value, min_close, max_close)
            if np.isnan(denorm_price):
                continue
            last_date = pd.to_datetime(last_date) + pd.Timedelta(days=1)
            predicted_prices[last_date] = self.truncate_2_decimal(denorm_price)
            lst_seq = np.roll(lst_seq, shift=-1, axis=1)
            lst_seq[:, -1, -1] = value

        predictions = [
            {'date': date.strftime('%Y-%m-%d'), 'price': price}
            for date, price in predicted_prices.items()
        ]

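        # De-normalize the last `sequence_length` Close values to report as actuals.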
        df_date = dataframe.index[-sequence_length:]
        close_values = dataframe.iloc[-sequence_length:]['Close'].values
        close_denorm = self.denormalization(close_values, min_close, max_close)

        actuals = [
            {'date': to_datetime(date).strftime('%Y-%m-%d'), 'price': self.truncate_2_decimal(price)}
            for date, price in zip(df_date, close_denorm)
        ]

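        # Debug output: list the model, dataset, and scaler directories.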
        os.system(f'ls -al {self.model_path}')
        os.system(f'ls -al {self.posttrained_path}')
        os.system(f'ls -al {self.scaler_path}')

        return actuals, predictions
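

# --------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the original module): it
# assumes a model exported as ./models/<name>.onnx with a matching CSV under
# ./indonesia_stocks/modeling_datas and scaler JSON under
# ./indonesia_stocks/min_max. The model name 'BBCA' and the horizon/window
# values below are hypothetical placeholders.
# --------------------------------------------------------------------------
if __name__ == '__main__':
    import asyncio

    async def _demo():
        utils = Utilities()
        actuals, predictions = await utils.cryptocurrency_prediction_utils(
            days=7, sequence_length=60, model_name='BBCA')
        print('actuals:', actuals[:3])
        print('predictions:', predictions[:3])

    asyncio.run(_demo())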