import time
from datetime import datetime

import streamlit as st
from st_aggrid import AgGrid, ColumnsAutoSizeMode

from apps.dump_analysis import dump_analysis_space
from queries.process_all_db import process_all_tech_db, process_all_tech_db_with_stats
from queries.process_gsm import process_gsm_data_to_excel
from queries.process_lte import process_lte_data_to_excel

# from queries.process_mal import process_mal_data_to_excel
from queries.process_mrbts import process_mrbts_data_to_excel
from queries.process_neighbors import process_neighbors_data_to_excel

# from queries.process_trx import process_trx_with_bts_name_data_to_excel
from queries.process_wcdma import process_wcdma_data_to_excel
from utils.check_sheet_exist import DumpType, Technology, execute_checks_sheets_exist
from utils.utils_vars import GsmAnalysisData, UtilsVars, WcdmaAnalysisData
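# Streamlit page: convert an uploaded .xlsb parameter dump into per-technology
# Excel databases (2G/3G/LTE, neighbors, MRBTS) and optionally show statistics.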

st.title("Database processing")

uploaded_file = st.file_uploader("Upload updated dump file", type="xlsb")


def process_database(process_func, database_type):
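    """Run `process_func` on the uploaded dump, report the elapsed time,
    and render a download button for the generated `database_type` workbook."""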
    if uploaded_file is not None:
        start_time = time.time()
        process_func(uploaded_file)
        execution_time = time.time() - start_time
        st.write(
            f"{database_type} database is generated. Execution time: {execution_time:.2f} seconds"
        )
        download_button(database_type)


@st.fragment()
def download_button(database_type):
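    """Offer the generated workbook for `database_type` as an .xlsx download.

    Decorated with st.fragment so clicking the button reruns only this fragment,
    not the whole page.
    """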
    if database_type == "2G":
        data = UtilsVars.final_gsm_database
        file_name = f"2G database_{datetime.now()}.xlsx"
    elif database_type == "3G":
        data = UtilsVars.final_wcdma_database
        file_name = f"3G database_{datetime.now()}.xlsx"
    elif database_type == "LTE":
        data = UtilsVars.final_lte_database
        file_name = f"LTE database_{datetime.now()}.xlsx"
    elif database_type == "All":
        data = UtilsVars.final_all_database
        file_name = f"All databases_{datetime.now()}.xlsx"
    elif database_type == "NEI":
        data = UtilsVars.neighbors_database
        file_name = f"Neighbors databases_{datetime.now()}.xlsx"
    # elif database_type == "TRX":
    #     data = UtilsVars.final_trx_database
    #     file_name = f"TRX database_{datetime.now()}.xlsx"
    elif database_type == "MRBTS":
        data = UtilsVars.final_mrbts_database
        file_name = f"MRBTS database_{datetime.now()}.xlsx"
    # elif database_type == "MAL":
    #     data = UtilsVars.final_mal_database
    #     file_name = f"MAL database_{datetime.now()}.xlsx"
    st.download_button(
        type="primary",
        label=f"Download {database_type} Database File",
        data=data,
        file_name=file_name,
        mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    )


def execute_process_all_tech_db(uploaded_file):
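    """Generate the combined database for all technologies from the uploaded dump."""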
    if uploaded_file is not None:
        start_time = time.time()
        process_all_tech_db(uploaded_file)
        execution_time = time.time() - start_time
        st.write(
            f"All databases are generated. Execution time: {execution_time:.2f} seconds"
        )
        download_button("All")


# def execute_process_all_tech_db_with_stats(uploaded_file: str, region_list: list):
def execute_process_all_tech_db_with_stats(uploaded_file: str):
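    """Generate all databases and prepare the dataframes used by the stats/analysis tabs."""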

    if uploaded_file is not None:
        start_time = time.time()
        process_all_tech_db_with_stats(
            uploaded_file,
            #    region_list
        )
        execution_time = time.time() - start_time
        st.write(
            f"All databases are generated. Execution time: {execution_time:.2f} seconds"
        )
        download_button("All")


col1, col2, col3, col4 = st.columns(4)
col5, col6, col7, col8 = st.columns(4)
if uploaded_file is not None:
    # UtilsVars.file_path = uploaded_file

    try:

        execute_checks_sheets_exist(uploaded_file)
        if (
            not Technology.gsm
            and not Technology.wcdma
            and not Technology.lte
            and not Technology.neighbors
            and not Technology.trx
            and not Technology.mrbts
        ):
            st.error(
                """
                Uploaded file does not contain the required sheets for any technology.
                Expected sheets per technology:
                gsm: BTS, BCF, TRX, MAL
                wcdma: WCEL, WBTS, WNCEL
                lte: LNBTS, LNCEL, LNCEL_FDD, LNCEL_TDD
                neighbors: ADCE, ADJS, ADJI, ADJG, ADJW, BTS, WCEL
                trx: TRX, BTS
                mrbts: MRBTS
                mal: MAL
                """
            )

        if (
            Technology.gsm
            and Technology.wcdma
            and Technology.lte
            and Technology.trx
            and Technology.mrbts
            and Technology.mal
        ):
            DumpType.full_dump = True
            with col1:
                st.button(
                    "Generate All DBs",
                    on_click=lambda: execute_process_all_tech_db(uploaded_file),
                )
        if Technology.gsm:
            with col2:
                st.button(
                    "Generate 2G DB",
                    on_click=lambda: process_database(process_gsm_data_to_excel, "2G"),
                )
        if Technology.wcdma:
            with col3:
                st.button(
                    "Generate 3G DB",
                    on_click=lambda: process_database(
                        process_wcdma_data_to_excel, "3G"
                    ),
                )
        if Technology.lte:
            with col4:
                st.button(
                    "Generate LTE DB",
                    on_click=lambda: process_database(process_lte_data_to_excel, "LTE"),
                )
        # if Technology.trx == True:
        #     with col5:
        #         st.button(
        #             "Generate TRX DB",
        #             on_click=lambda: process_database(
        #                 process_trx_with_bts_name_data_to_excel, "TRX"
        #             ),
        #         )
        if Technology.mrbts:
            with col5:
                st.button(
                    "Generate MRBTS",
                    on_click=lambda: process_database(
                        process_mrbts_data_to_excel, "MRBTS"
                    ),
                )

        # if Technology.mal == True:
        #     with col7:
        #         st.button(
        #             "Generate MAL",
        #             on_click=lambda: process_database(process_mal_data_to_excel, "MAL"),
        #         )
        if Technology.neighbors:
            with col6:
                st.button(
                    "Generate NEI DB",
                    on_click=lambda: process_database(
                        process_neighbors_data_to_excel, "NEI"
                    ),
                )

    except Exception as e:
        st.error(f"Error: {e}")


######################## ANALYTICS AND STATS ####################################
@st.fragment
def table_data():
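    """Show the generated dataframes in an AgGrid table, selectable by name."""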
    if UtilsVars.all_db_dfs_names:
        selected_table = st.selectbox("Choose Data", UtilsVars.all_db_dfs_names)
        table_df = UtilsVars.all_db_dfs[
            UtilsVars.all_db_dfs_names.index(selected_table)
        ]
        st.write(f"### {selected_table} Data")
        AgGrid(
            table_df,
            fit_columns_on_grid_load=True,
            theme="streamlit",
            enable_enterprise_modules=True,
            filter=True,
            # columns_auto_size_mode=ColumnsAutoSizeMode.FIT_CONTENTS,
        )


if uploaded_file is not None:
    if DumpType.full_dump:
        # regions = st.multiselect(
        #     "Select the region(s) you want to analyze",
        #     ["Test BTS", "SKS", "SEG", "TBC", "KDL", "KKO", "GAO", "MPT", "KYS"],
        #     default=[
        #         "Test BTS",
        #         "SKS",
        #         "SEG",
        #         "TBC",
        #         "KDL",
        #         "KKO",
        #         "GAO",
        #         "MPT",
        #         "KYS",
        #     ],
        # )
        if st.button("Generate All DBs and Show Stats"):
            # if regions:
            execute_process_all_tech_db_with_stats(
                uploaded_file,
                #    regions
            )
            tab1, tab2 = st.tabs(["πŸ—ƒ Data", "πŸ“ˆ Chart"])
            with tab1:
                table_data()
            with tab2:
                dump_analysis_space()