Adding Aggrid - 1st trial
- app.py +2 -2
- apps/database_page.py +27 -2
- apps/dump_analysis.py +1 -0
- queries/process_all_db.py +12 -2
- queries/process_gsm.py +6 -2
- queries/process_lte.py +6 -2
- queries/process_mrbts.py +1 -0
- queries/process_wcdma.py +4 -1
- requirements.txt +0 -0
- test.py +48 -0
- utils/utils_vars.py +4 -0
app.py
CHANGED
@@ -3,10 +3,10 @@ import streamlit as st
 st.set_page_config(
     page_title="NPO DB Query",
     page_icon="💻",
-    layout="
+    layout="wide",
     initial_sidebar_state="expanded",
     menu_items={
-        "About": "**📡 NPO DB Query v0.2.
+        "About": "**📡 NPO DB Query v0.2.3**",
     },
 )
apps/database_page.py
CHANGED
@@ -2,6 +2,7 @@ import time
 from datetime import datetime

 import streamlit as st
+from st_aggrid import AgGrid, ColumnsAutoSizeMode

 from apps.dump_analysis import dump_analysis_space
 from queries.process_all_db import process_all_tech_db, process_all_tech_db_with_stats
@@ -99,7 +100,9 @@ col1, col2, col3, col4 = st.columns(4)
 col5, col6, col7, col8 = st.columns(4)
 if uploaded_file is not None:
     # UtilsVars.file_path = uploaded_file
+
     try:
+
         execute_checks_sheets_exist(uploaded_file)
         if (
             Technology.gsm == False
@@ -111,7 +114,7 @@ if uploaded_file is not None:
         ):
             st.error(
                 """
-
+            Uploaded file does not contain required sheets for any technology.
                 "gsm": ["BTS", "BCF", "TRX","MAL"],
                 "wcdma": ["WCEL", "WBTS", "WNCEL"],
                 "lte": ["LNBTS", "LNCEL", "LNCEL_FDD", "LNCEL_TDD"],
@@ -187,11 +190,29 @@ if uploaded_file is not None:
                 process_neighbors_data_to_excel, "NEI"
             ),
         )
+
     except Exception as e:
         st.error(f"Error: {e}")


 ######################## ANALYTICS AND STATS ####################################
+@st.fragment
+def table_data():
+    if UtilsVars.all_db_dfs_names != []:
+        selected_table = st.selectbox("Choose Data", UtilsVars.all_db_dfs_names)
+        table_df = UtilsVars.all_db_dfs[
+            UtilsVars.all_db_dfs_names.index(selected_table)
+        ]
+        st.write(f"### {selected_table} Data")
+        AgGrid(
+            table_df,
+            fit_columns_on_grid_load=True,
+            theme="streamlit",
+            enable_enterprise_modules=True,
+            filter=True,
+            # columns_auto_size_mode=ColumnsAutoSizeMode.FIT_CONTENTS,
+        )
+

 if uploaded_file is not None:
     if DumpType.full_dump == True:
@@ -216,4 +237,8 @@ if uploaded_file is not None:
             uploaded_file,
             # regions
         )
-
+        tab1, tab2 = st.tabs(["🗃 Data", "📈 Chart"])
+        with tab1:
+            table_data()
+        with tab2:
+            dump_analysis_space()
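For context, here is a minimal, self-contained sketch of the pattern this change introduces in apps/database_page.py: rendering a pandas DataFrame with streamlit-aggrid inside a Streamlit fragment, so only the table section reruns when its selectbox changes. The sample DataFrames are illustrative stand-ins for the ones the app keeps in UtilsVars, and @st.fragment assumes a recent Streamlit release (older versions expose it as st.experimental_fragment).

import pandas as pd
import streamlit as st
from st_aggrid import AgGrid

# Illustrative data only; the real app fills UtilsVars.all_db_dfs from the uploaded dump.
tables = {
    "GSM": pd.DataFrame({"BSC": ["BSC1", "BSC2"], "code": ["S001", "S002"]}),
    "WCDMA": pd.DataFrame({"WBTS": ["W1", "W2"], "code": ["S003", "S004"]}),
}

@st.fragment  # reruns only this block when the selectbox value changes
def table_data():
    selected = st.selectbox("Choose Data", list(tables.keys()))
    st.write(f"### {selected} Data")
    AgGrid(
        tables[selected],
        fit_columns_on_grid_load=True,  # stretch columns to fill the grid width
        theme="streamlit",              # match the Streamlit look
    )

table_data()

Run it with streamlit run on any script; the enable_enterprise_modules and filter arguments in the commit layer extra grid features on top of this minimal call.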
apps/dump_analysis.py
CHANGED
@@ -20,6 +20,7 @@ def dump_analysis_space():
     st.title("ANALYTICS DATA")

     ####################### GSM ANALYTICS DATA #######################################
+
     st.subheader(":blue[GSM ANALYTICS DATA]")

     (
queries/process_all_db.py
CHANGED
@@ -8,6 +8,10 @@ from utils.utils_vars import UtilsVars

 def all_dbs(filepath: str):
     UtilsVars.all_db_dfs.clear()
+    UtilsVars.all_db_dfs_names.clear()
+    UtilsVars.gsm_dfs.clear()
+    UtilsVars.wcdma_dfs.clear()
+    UtilsVars.lte_dfs.clear()
     combined_gsm_database(filepath)
     process_wcdma_data(filepath)
     process_lte_data(filepath),
@@ -23,10 +27,16 @@ def process_all_tech_db(filepath: str):
     )


-def process_all_tech_db_with_stats(
+def process_all_tech_db_with_stats(
+    filepath: str,
+    # region_list: list
+):
     all_dbs(filepath)
     gsm_analaysis(filepath)
-    wcdma_analaysis(
+    wcdma_analaysis(
+        filepath,
+        # region_list
+    )
     lte_fdd_analaysis(filepath)
     lte_tdd_analaysis(filepath)
     UtilsVars.final_all_database = convert_dfs(
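The new clear() calls at the top of all_dbs guard against a subtle issue: UtilsVars holds class-level lists that survive across reruns, so processing a second upload without clearing would append a second copy of every DataFrame and name. A small sketch of that failure mode, using an illustrative Registry class rather than the project's own UtilsVars:

import pandas as pd

class Registry:
    # class-level lists shared by every caller, same idea as UtilsVars
    all_db_dfs = []
    all_db_dfs_names = []

def build(df: pd.DataFrame, name: str, reset: bool) -> None:
    if reset:
        # mirrors the clear() calls added in all_dbs()
        Registry.all_db_dfs.clear()
        Registry.all_db_dfs_names.clear()
    Registry.all_db_dfs.append(df)
    Registry.all_db_dfs_names.append(name)

df = pd.DataFrame({"code": ["S001"]})
build(df, "GSM", reset=False)
build(df, "GSM", reset=False)
print(Registry.all_db_dfs_names)  # ['GSM', 'GSM'] -> duplicates pile up without clearing
build(df, "GSM", reset=True)
print(Registry.all_db_dfs_names)  # ['GSM'] -> one entry per table after a reset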
queries/process_gsm.py
CHANGED
@@ -169,6 +169,8 @@ def combined_gsm_database(file_path: str):
     trx_df = process_trx_with_bts_name(file_path)

     UtilsVars.all_db_dfs.extend([gsm_df, mal_df, trx_df])
+    UtilsVars.gsm_dfs.extend([gsm_df, mal_df, trx_df])
+    UtilsVars.all_db_dfs_names.extend(["GSM", "MAL", "TRX"])
     return [gsm_df, mal_df, trx_df]


@@ -187,9 +189,11 @@ def process_gsm_data_to_excel(file_path: str):


 def gsm_analaysis(file_path: str):
-    gsm_df = process_gsm_data(file_path)
-    trx_df = process_trx_with_bts_name(file_path)
+    # gsm_df = process_gsm_data(file_path)
+    # trx_df = process_trx_with_bts_name(file_path)

+    gsm_df: pd.DataFrame = UtilsVars.gsm_dfs[0]
+    trx_df: pd.DataFrame = UtilsVars.gsm_dfs[2]
     # df to count number of site per bsc
     df_site_per_bsc = gsm_df[["BSC", "code"]]
     df_site_per_bsc = df_site_per_bsc.drop_duplicates(subset=["code"], keep="first")
queries/process_lte.py
CHANGED
@@ -203,6 +203,8 @@ def process_lte_data(file_path: str):
     # save_dataframe(df_fdd_final, "fdd")
     # save_dataframe(df_tdd_final, "tdd")
     UtilsVars.all_db_dfs.extend([df_fdd_final, df_tdd_final])
+    UtilsVars.lte_dfs.extend([df_fdd_final, df_tdd_final])
+    UtilsVars.all_db_dfs_names.extend(["LTE_FDD", "LTE_TDD"])

     return [df_fdd_final, df_tdd_final]
     # add the fdd and tdd to the list
@@ -219,7 +221,8 @@ def process_lte_data_to_excel(file_path: str):


 def lte_fdd_analaysis(file_path: str):
-    df_fdd = process_lte_data(file_path)[0]
+    # df_fdd = process_lte_data(file_path)[0]
+    df_fdd: pd.DataFrame = UtilsVars.lte_dfs[0]

     LteFddAnalysisData.total_number_of_lncel = len(df_fdd["ID_LNCEL"].unique())
     LteFddAnalysisData.total_number_of_site = len(df_fdd["code"].unique())
@@ -238,7 +241,8 @@ def lte_fdd_analaysis(file_path: str):


 def lte_tdd_analaysis(file_path: str):
-    df_tdd = process_lte_data(file_path)[1]
+    # df_tdd = process_lte_data(file_path)[1]
+    df_tdd: pd.DataFrame = UtilsVars.lte_dfs[1]

     LteTddAnalysisData.total_number_of_lncel = len(df_tdd["ID_LNCEL"].unique())
     LteTddAnalysisData.total_number_of_site = len(df_tdd["code"].unique())
queries/process_mrbts.py
CHANGED
@@ -27,6 +27,7 @@ def process_mrbts_data(file_path: str) -> pd.DataFrame:
     df_mrbts = df_mrbts[["MRBTS", "CODE", "name", "btsName"]]

     UtilsVars.all_db_dfs.append(df_mrbts)
+    UtilsVars.all_db_dfs_names.append("MRBTS")
     return df_mrbts

queries/process_wcdma.py
CHANGED
@@ -162,6 +162,8 @@ def process_wcdma_data(file_path: str):
     # save_dataframe(df_wncel, "wncel")
     # df_3g = save_dataframe(df_3g, "3G")
     UtilsVars.all_db_dfs.append(df_3g)
+    UtilsVars.wcdma_dfs.append(df_3g)
+    UtilsVars.all_db_dfs_names.append("WCDMA")

     # UtilsVars.final_wcdma_database = convert_dfs([df_3g], ["WCDMA"])
     return df_3g
@@ -194,7 +196,8 @@ def wcdma_analaysis(
     Args:
         filepath (str): The path to the file.
     """
-    wcdma_df = process_wcdma_data(filepath)
+    # wcdma_df = process_wcdma_data(filepath)
+    wcdma_df: pd.DataFrame = UtilsVars.wcdma_dfs[0]

     # filter per list of regions
     # wcdma_df = wcdma_df.loc[wcdma_df["Region"].isin(region_list)]
requirements.txt
CHANGED
Binary files a/requirements.txt and b/requirements.txt differ
test.py
ADDED
@@ -0,0 +1,48 @@
+import os
+
+import pandas as pd
+import pytest
+
+from queries.process_all_db import (
+    all_dbs,
+    process_all_tech_db,
+    process_all_tech_db_with_stats,
+)
+from utils.utils_vars import UtilsVars
+
+
+class TestProcessAllDB:
+    def setup_method(self):
+        UtilsVars.all_db_dfs = []
+        UtilsVars.final_all_database = None
+
+    def test_all_dbs(self):
+        filepath = r"C:\Users\David\Documents\PROJECTS\2023\PROJET 2023\DUMP\DUMP\NOVEMBRE\20241127_21145_27112024_Dump.xml.gz.xlsb"
+        all_dbs(filepath)
+        assert len(UtilsVars.all_db_dfs) == 7
+        assert isinstance(UtilsVars.all_db_dfs[0], pd.DataFrame)
+
+    def test_process_all_tech_db(self):
+        filepath = r"C:\Users\David\Documents\PROJECTS\2023\PROJET 2023\DUMP\DUMP\NOVEMBRE\20241127_21145_27112024_Dump.xml.gz.xlsb"
+        process_all_tech_db(filepath)
+        assert UtilsVars.final_all_database is not None
+
+    def test_process_all_tech_db_with_stats(self):
+        filepath = r"C:\Users\David\Documents\PROJECTS\2023\PROJET 2023\DUMP\DUMP\NOVEMBRE\20241127_21145_27112024_Dump.xml.gz.xlsb"
+        process_all_tech_db_with_stats(filepath)
+        assert UtilsVars.final_all_database is not None
+
+    # def test_all_dbs_empty_file(self):
+    #     filepath = r"C:\Users\HP\Desktop\LTE\PROJET 2023\DUMP\2024\SEPTEMBRE\empty.xlsb"
+    #     all_dbs(filepath)
+    #     assert len(UtilsVars.all_db_dfs) == 0
+
+    # def test_process_all_tech_db_empty_file(self):
+    #     filepath = r"C:\Users\HP\Desktop\LTE\PROJET 2023\DUMP\2024\SEPTEMBRE\empty.xlsb"
+    #     process_all_tech_db(filepath)
+    #     assert UtilsVars.final_all_database is None
+
+    # def test_process_all_tech_db_with_stats_empty_file(self):
+    #     filepath = r"C:\Users\HP\Desktop\LTE\PROJET 2023\DUMP\2024\SEPTEMBRE\empty.xlsb"
+    #     process_all_tech_db_with_stats(filepath)
+    #     assert UtilsVars.final_all_database is None
utils/utils_vars.py
CHANGED
@@ -53,7 +53,11 @@ class UtilsVars:
     final_trx_database = ""
     final_mrbts_database = ""
     final_mal_database = ""
+    gsm_dfs = []
+    wcdma_dfs = []
+    lte_dfs = []
     all_db_dfs = []
+    all_db_dfs_names = []
     final_all_database = None
     neighbors_database = ""
     file_path = ""
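The four new attributes form parallel lists: all_db_dfs_names carries the labels offered in the selectbox while all_db_dfs holds the DataFrames at the matching positions, and the per-technology lists (gsm_dfs, wcdma_dfs, lte_dfs) let the analysis functions reuse already-built frames instead of reprocessing the file. A minimal sketch of the index-based lookup that table_data() relies on, with illustrative frames and assuming the script runs from the repository root so the import resolves:

import pandas as pd

from utils.utils_vars import UtilsVars

# Illustrative frames; in the app the process_* modules fill these lists.
UtilsVars.all_db_dfs = [pd.DataFrame({"BSC": ["B1"]}), pd.DataFrame({"WBTS": ["W1"]})]
UtilsVars.all_db_dfs_names = ["GSM", "WCDMA"]

def lookup(name: str) -> pd.DataFrame:
    # same pairing by position that apps/database_page.py uses
    return UtilsVars.all_db_dfs[UtilsVars.all_db_dfs_names.index(name)]

print(lookup("WCDMA"))

Keeping the two lists in step is the one maintenance cost of this layout; a single dict keyed by table name would give the same lookup without the positional coupling.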