Update app.py
app.py (CHANGED)
@@ -8,11 +8,20 @@ import os, re
 from collections import defaultdict
 from datetime import datetime
 
+# Hugging Face dataset + template config
 HF_DATASET_REPO = "leadingbridge/ammu"
 TEMPLATE_FILENAME = "AMMU-order-form-template.xlsx"
+# If you place the template in the Space repo, this will be used first (offline-friendly)
 LOCAL_TEMPLATE_FALLBACK = os.path.join(os.path.dirname(__file__), TEMPLATE_FILENAME)
 
+# -----------------------
+# Helpers
+# -----------------------
 def _normalize_power(val):
+    """
+    Normalize input "Product Option Value" to match template textual labels, e.g. '0.00', '-1.25'.
+    Accepts variations like 'Plano', '0', '-1', '-1.0', etc.
+    """
     if val is None:
         return None
     s = str(val).strip()
@@ -30,6 +39,10 @@ def _normalize_power(val):
     return f"{num:.2f}"
 
 def _power_to_triplet_digits(power_str: str) -> str:
+    """
+    Convert textual power to numeric triplet fallback:
+    '-1.25' -> '125', '0.00' -> '000', '-4.00' -> '400'
+    """
     if power_str is None:
         return None
     s = power_str.strip().lstrip("+").replace("-", "")
@@ -42,6 +55,10 @@ def _power_to_triplet_digits(power_str: str) -> str:
     return digits.zfill(3)
 
 def _find_header_row(ws: Worksheet, required_headers):
+    """
+    In the INPUT sheet: find a header row (top 10 rows) that contains all required headers (case-insensitive).
+    Returns (row_index, {header_lower: col_index})
+    """
     req = {h.lower() for h in required_headers}
     for r in range(1, 11):
         header_map = {}
@@ -58,6 +75,9 @@ def _find_header_row(ws: Worksheet, required_headers):
     raise ValueError(f"Could not locate a header row containing: {required_headers}")
 
 def _download_template():
+    """
+    Get the template file path, preferring a local copy, else downloading from HF dataset.
+    """
     if os.path.exists(LOCAL_TEMPLATE_FALLBACK):
         return LOCAL_TEMPLATE_FALLBACK
     return hf_hub_download(
@@ -65,7 +85,9 @@ def _download_template():
     )
 
 def _auto_fit_columns(ws: Worksheet, max_col: int, max_row: int):
-
+    """
+    Fit columns based on longest string length in column with small padding.
+    """
     for c in range(1, max_col + 1):
         max_len = 0
         col_letter = get_column_letter(c)
@@ -75,16 +97,19 @@ def _auto_fit_columns(ws: Worksheet, max_col: int, max_row: int):
                 max_len = max(max_len, len(str(val)))
         ws.column_dimensions[col_letter].width = max(10, max_len + 2)
 
+# -----------------------
+# Core processing
+# -----------------------
 def process(input_file):
     try:
         if input_file is None:
             return None, "Please upload an Excel file first."
 
-        # --- INPUT: detect headers
+        # --- INPUT: read and detect headers
         wb_in = load_workbook(input_file.name, data_only=True)
         ws_in = wb_in.active
 
-        #
+        # For "raw data" tab later
         in_max_row = ws_in.max_row
         in_max_col = ws_in.max_column
 
@@ -97,6 +122,7 @@ def process(input_file):
 
         header_values = [ws_in.cell(row=header_row_idx, column=c).value for c in range(1, in_max_col + 1)]
 
+        # Collect entries (line-by-line) so we can keep unmatched verbatim
        entries = []
        rows_scanned = 0
        for r in range(header_row_idx + 1, ws_in.max_row + 1):
@@ -110,6 +136,7 @@ def process(input_file):
             rows_scanned += 1
 
             power = _normalize_power(pov)
+            # robust int conversion
             try:
                 q = int(qty) if qty is not None and str(qty).strip() != "" else 0
             except Exception:
@@ -125,11 +152,15 @@ def process(input_file):
                 "row_values": row_values
             })
 
-        # --- OUTPUT template
+        # --- OUTPUT: load the AMMU template
         template_path = _download_template()
         wb_out = load_workbook(template_path)
         ws_out = wb_out.active
 
+        # Find:
+        # (A) "MY SKU" header to build SKU->row map (instead of "SKU")
+        # (B) textual power labels (e.g. 0.00, -1.25)
+        # (C) triplet labels (e.g. 000, 125) as fallback
         mysku_header_row = None
         mysku_col_idx = None
         power_label_row = None
@@ -139,12 +170,14 @@ def process(input_file):
 
         for r in range(1, 11):
             row_vals = [ws_out.cell(row=r, column=c).value for c in range(1, ws_out.max_column + 1)]
-
+
+            # (A) MY SKU
             for c, v in enumerate(row_vals, start=1):
                 if isinstance(v, str) and v.strip().lower() == "my sku":
                     mysku_header_row = r
                     mysku_col_idx = c
-
+
+            # (B) textual powers
             labels = {}
             for c, v in enumerate(row_vals, start=1):
                 if isinstance(v, str):
@@ -154,7 +187,8 @@ def process(input_file):
             if len(labels) >= 5 and power_label_row is None:
                 power_label_row = r
                 power_col_map = labels
-
+
+            # (C) numeric triplets
             trip = {}
             for c, v in enumerate(row_vals, start=1):
                 if isinstance(v, str) and re.fullmatch(r"\d{2,3}", v.strip()):
@@ -177,28 +211,33 @@ def process(input_file):
                 sku_to_row[str(val).strip()] = r
 
         # Classify entries and aggregate matches
-        agg = defaultdict(int)
-        unmatched_rows = []
+        agg = defaultdict(int)  # (sku, power) -> total qty
+        unmatched_rows = []     # verbatim rows to copy to "additional order"
 
         for rec in entries:
             sku, power, qty = rec["sku"], rec["power"], rec["qty"]
+            # Basic validation
             if not sku or qty <= 0 or power is None:
                 unmatched_rows.append(rec["row_values"])
                 continue
+
             row_idx = sku_to_row.get(sku)
             if row_idx is None:
                 unmatched_rows.append(rec["row_values"])
                 continue
+
             col_idx = power_col_map.get(power) if power_col_map else None
             if col_idx is None and triplet_col_map:
                 key = _power_to_triplet_digits(power)
                 col_idx = triplet_col_map.get(key)
+
             if col_idx is None:
                 unmatched_rows.append(rec["row_values"])
                 continue
+
             agg[(sku, power)] += qty
 
-        # Write aggregated matches
+        # Write aggregated matches to template
         written_count = 0
         for (sku, power), qty in agg.items():
             row_idx = sku_to_row.get(sku)
@@ -210,6 +249,7 @@ def process(input_file):
                 col_idx = triplet_col_map.get(key)
             if col_idx is None:
                 continue
+
             current = ws_out.cell(row=row_idx, column=col_idx).value
             try:
                 current_val = int(current) if current is not None and str(current).strip() != "" else 0
@@ -221,28 +261,37 @@ def process(input_file):
             ws_out.cell(row=row_idx, column=col_idx).value = current_val + int(qty)
             written_count += 1
 
-        #
+        # -----------------------
+        # "additional order" tab
+        # -----------------------
         add_name = "additional order"
         if add_name in wb_out.sheetnames:
             wb_out.remove(wb_out[add_name])
         ws_add = wb_out.create_sheet(title=add_name)
 
-        # header + rows
+        # header + unmatched rows
         for c, val in enumerate(header_values, start=1):
             ws_add.cell(row=1, column=c).value = val
         for i, row_vals in enumerate(unmatched_rows, start=2):
             for c, val in enumerate(row_vals, start=1):
                 ws_add.cell(row=i, column=c).value = val
 
-        #
-
+        # Add custom note after the last data row
+        note_row = len(unmatched_rows) + 2  # header(1) + data(n)
+        ws_add.cell(row=note_row, column=1).value = "20片裝如無貨可以2盒10片裝代替"
+
+        # Auto-fit "additional order"
+        _auto_fit_columns(ws_add, max_col=in_max_col, max_row=max(note_row, 1))
 
-        #
-
+        # -----------------------
+        # "Raw Data(please ignore)" tab
+        # -----------------------
+        raw_name = "Raw Data(please ignore)"
         if raw_name in wb_out.sheetnames:
             wb_out.remove(wb_out[raw_name])
         ws_raw = wb_out.create_sheet(title=raw_name)
 
+        # Copy ENTIRE input verbatim
         for r in range(1, in_max_row + 1):
             for c in range(1, in_max_col + 1):
                 ws_raw.cell(row=r, column=c).value = ws_in.cell(row=r, column=c).value
@@ -250,13 +299,16 @@ def process(input_file):
         # Auto-fit raw data
         _auto_fit_columns(ws_raw, max_col=in_max_col, max_row=in_max_row)
 
-        #
+        # -----------------------
+        # Save output with YYMMDD suffix
+        # -----------------------
         yymmdd = datetime.now().strftime("%y%m%d")
         tmpdir = tempfile.mkdtemp()
         out_filename = f"AMMU-Order-Form-Leading-Bridge-{yymmdd}.xlsx"
         out_path = os.path.join(tmpdir, out_filename)
         wb_out.save(out_path)
 
+        # Log
         log_lines = [
             f"Rows scanned in input: {rows_scanned}",
             f"Unique matched (SKU, power) pairs aggregated: {len(agg)}",
@@ -271,14 +323,17 @@
     except Exception as e:
         return None, f"Error: {e}"
 
+# -----------------------
+# UI
+# -----------------------
 with gr.Blocks(title="AMMU Order Form Filler") as demo:
     gr.Markdown(
         "### AMMU Order Form Filler\n"
         "• Uses **MY SKU** column to map rows\n"
         "• Matches power columns (text like `-1.25` or fallback triplets like `125`)\n"
         "• Aggregates quantities for matched lines\n"
-        "• Copies **unmatched lines** to **`additional order`** (auto-fit columns)\n"
-        "• Copies **entire input** to **`
+        "• Copies **unmatched lines** to **`additional order`** (auto-fit columns + note)\n"
+        "• Copies **entire input** to **`Raw Data(please ignore)`** (auto-fit columns)\n"
        "• Exports as **AMMU-Order-Form-Leading-Bridge-YYMMDD.xlsx**"
     )
     with gr.Row():
@@ -291,5 +346,26 @@ with gr.Blocks(title="AMMU Order Form Filler") as demo:
 
     run_btn.click(fn=process, inputs=in_file, outputs=[out_file, log_box])
 
+    # Links block at the bottom
+    gr.HTML(
+        """
+        <div style="text-align: center; font-size: 16px; margin-top: 20px;">
+            <h3>Shipping Tools</h3>
+            <a href="https://huggingface.co/spaces/leadingbridge/shipping-dhl-e-commerce">DHL</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/shipping-ec-ship">EC-Ship</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/shipping-fedex">Fedex</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/shipping-UPS">UPS</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/shipping-yunexpress">Yunexpress</a>
+        </div>
+        <div style="text-align: center; font-size: 16px; margin-top: 20px;">
+            <h3>Administration Tools</h3>
+            <a href="https://huggingface.co/spaces/leadingbridge/email-template">Email Template</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/product-feed">Google Merchant</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/tss-order">Order Processing</a> |
+            <a href="https://huggingface.co/spaces/leadingbridge/ammu-order">Ammu Order</a>
+        </div>
+        """
+    )
+
 if __name__ == "__main__":
     demo.launch()
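
For reference, a minimal standalone sketch of the power-normalization and triplet-fallback behaviour the new docstrings describe. It is illustrative only: normalize_power and power_to_triplet below are simplified stand-ins (the handling of 'Plano' and the digit extraction are assumptions), not the app's actual _normalize_power / _power_to_triplet_digits, whose full bodies are not shown in this diff.

def normalize_power(val):
    # Sketch: map a raw "Product Option Value" to a textual label like '0.00' or '-1.25'.
    if val is None:
        return None
    s = str(val).strip()
    if s.lower() in ("plano", "pl"):  # assumption: 'Plano' means zero power
        return "0.00"
    try:
        num = float(s)
    except ValueError:
        return None
    return f"{num:.2f}"

def power_to_triplet(power_str):
    # Sketch: '-1.25' -> '125', '0.00' -> '000', '-4.00' -> '400'.
    if power_str is None:
        return None
    digits = "".join(ch for ch in power_str if ch.isdigit())
    return digits.zfill(3) if digits else None

for raw in ("Plano", "-1", "-1.25", "-4"):
    label = normalize_power(raw)
    print(raw, "->", label, "->", power_to_triplet(label))
# Plano -> 0.00 -> 000
# -1 -> -1.00 -> 100
# -1.25 -> -1.25 -> 125
# -4 -> -4.00 -> 400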