codys12 committed
Commit 90e73d9 · verified · Parent: 107e208

Update app.py

Files changed (1)
  1. app.py +86 -184
app.py CHANGED
@@ -12,223 +12,125 @@ import gradio_client.utils
 
 """NetCom → WooCommerce transformer (Try 1 schema)
 =================================================
-Drop a *Reseller Schedule* CSV and get back a WooCommerce-ready CSV that matches
-`Try 1 - WooCommerce_Mapped_Data__Fixed_Attributes_and_Agenda_.csv` exactly –
-including `Stock` and `Sold individually?` columns that NetCom doesn’t supply.
-
-Highlights
-----------
-* Empty cells are skipped – no wasted GPT calls.
-* GPT-4o mini used with a tiny disk cache (`ai_response_cache/`).
-* Brand → logo URLs hard-coded below (update when media library changes).
 """
-#
-# ---------------------------------------------------------------------------
-# Gradio JSON-schema helper hot-patch (bool schema bug)
-# ---------------------------------------------------------------------------
 _original = gradio_client.utils._json_schema_to_python_type
 
 def _fixed_json_schema_to_python_type(schema, defs=None):
-    if isinstance(schema, bool):  # gradio 4.29 bug
         return "any"
     return _original(schema, defs)
 
 gradio_client.utils._json_schema_to_python_type = _fixed_json_schema_to_python_type  # type: ignore
 
-# ---------------------------------------------------------------------------
-# Tiny disk cache for OpenAI responses
-# ---------------------------------------------------------------------------
 CACHE_DIR = Path("ai_response_cache"); CACHE_DIR.mkdir(exist_ok=True)
 
 
-def _cache_path(prompt: str) -> Path:
-    return CACHE_DIR / f"{hashlib.md5(prompt.encode()).hexdigest()}.json"
-
-
-def _get_cached(prompt: str):
     try:
-        return json.loads(_cache_path(prompt).read_text("utf-8"))["response"]
     except Exception:
         return None
 
-
-def _set_cache(prompt: str, rsp: str):
     try:
-        _cache_path(prompt).write_text(json.dumps({"prompt": prompt, "response": rsp}), "utf-8")
     except Exception:
         pass
 
-# ---------------------------------------------------------------------------
-# Async GPT helpers
-# ---------------------------------------------------------------------------
-async def _gpt(client: openai.AsyncOpenAI, prompt: str) -> str:
-    cached = _get_cached(prompt)
-    if cached is not None:
-        return cached
     try:
-        cmp = await client.chat.completions.create(
-            model="gpt-4o-mini",
-            messages=[{"role": "user", "content": prompt}],
-            temperature=0,
-        )
-        txt = cmp.choices[0].message.content
     except Exception as e:
-        txt = f"Error: {e}"
-    _set_cache(prompt, txt)
-    return txt
-
-
-async def _batch(texts: list[str], instruction: str) -> list[str]:
-    """Return len(texts) list. Blank inputs remain blank."""
-    res = ["" for _ in texts]
-    idx, prompts = [], []
-    for i, t in enumerate(texts):
-        if isinstance(t, str) and t.strip():
-            idx.append(i); prompts.append(f"{instruction}\n\nText: {t}")
-    if not prompts:
-        return res
     client = openai.AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))
-    tasks = [_gpt(client, p) for p in prompts]
-    outs = await asyncio.gather(*tasks)
-    for k, v in enumerate(outs):
-        res[idx[k]] = v
-    return res
-
-# ---------------------------------------------------------------------------
-# Main converter
-# ---------------------------------------------------------------------------
-
-def process_woocommerce_data_in_memory(netcom_file):
-    """Return BytesIO of Woo CSV."""
-    # Brand logos
-    brand_logo_map = {
-        "Amazon Web Services": "/wp-content/uploads/2025/04/aws.png",
-        "Cisco": "/wp-content/uploads/2025/04/cisco-e1738593292198-1.webp",
-        "Microsoft": "/wp-content/uploads/2025/04/Microsoft-e1737494120985-1.png",
-        "Google Cloud": "/wp-content/uploads/2025/04/Google_Cloud.png",
-        "EC Council": "/wp-content/uploads/2025/04/Ec_Council.png",
-        "ITIL": "/wp-content/uploads/2025/04/ITIL.webp",
-        "PMI": "/wp-content/uploads/2025/04/PMI.png",
-        "Comptia": "/wp-content/uploads/2025/04/Comptia.png",
-        "Autodesk": "/wp-content/uploads/2025/04/autodesk.png",
-        "ISC2": "/wp-content/uploads/2025/04/ISC2.png",
-        "AICerts": "/wp-content/uploads/2025/04/aicerts-logo-1.png",
-    }
-    default_prereq = (
-        "No specific prerequisites are required for this course. "
-        "Basic computer literacy and familiarity with fundamental concepts in the subject area are recommended for the best learning experience."
-    )
-    # Load NetCom CSV
-    df = pd.read_csv(netcom_file.name, encoding="latin1"); df.columns = df.columns.str.strip()
-    def _col(opts):
-        return next((c for c in opts if c in df.columns), None)
-    # Column aliases
-    col_desc = _col(["Description", "Decription"])
-    col_obj = _col(["Objectives", "objectives"])
-    col_pre = _col(["RequiredPrerequisite", "Required Pre-requisite"])
-    col_out = _col(["Outline"])
-    col_dur = _col(["Duration"])
-    col_sid = _col(["Course SID", "Course SID"])
-    if col_dur is None:
-        df["Duration"] = ""; col_dur = "Duration"
-    # AI prep lists
-    descs, objs, pres, outs = (df.get(c, pd.Series([""]*len(df))).fillna("").tolist() for c in (col_desc, col_obj, col_pre, col_out))
-    loop = asyncio.new_event_loop(); asyncio.set_event_loop(loop)
-    short_d, long_d, fmt_obj, fmt_out = loop.run_until_complete(asyncio.gather(
-        _batch(descs, "Create a concise 250-character summary of this course description:"),
-        _batch(descs, "Condense this description to a maximum of 750 characters in paragraph format, with clean formatting:"),
-        _batch(objs, "Format these objectives into a bullet list with clean formatting. Start each bullet with '• ':"),
-        _batch(outs, "Format this agenda into a bullet list with clean formatting. Start each bullet with '• ':"),
-    )); loop.close()
-    fmt_pre = [default_prereq if not str(p).strip() else asyncio.run(_batch([p], "Format these prerequisites into a bullet list with clean formatting. Start each bullet with '• ':"))[0] for p in pres]
-    # Attach processed cols
-    df["Short_Description"] = short_d; df["Condensed_Description"] = long_d
-    df["Formatted_Objectives"] = fmt_obj; df["Formatted_Agenda"] = fmt_out; df["Formatted_Prerequisites"] = fmt_pre
-    # Dates
     df["Course Start Date"] = pd.to_datetime(df["Course Start Date"], errors="coerce")
     df["Date_fmt"] = df["Course Start Date"].dt.strftime("%-m/%-d/%Y")
-    df_sorted = df.sort_values(["Course ID", "Course Start Date"])
-    date_agg = df_sorted.groupby("Course ID")["Date_fmt"].apply(lambda s: ",".join(s.dropna().unique())).reset_index(name="Aggregated_Dates")
-    time_agg = df_sorted.groupby("Course ID").apply(lambda g: ",".join(f"{st}-{et} {tz}" for st, et, tz in zip(g["Course Start Time"], g["Course End Time"], g["Time Zone"]))).reset_index(name="Aggregated_Times")
-    parents = df_sorted.drop_duplicates("Course ID").merge(date_agg).merge(time_agg)
-    # Parent rows
-    woo_parent = pd.DataFrame({
-        "Type": "variable",
-        "SKU": parents["Course ID"],
-        "Name": parents["Course Name"],
-        "Published": 1,
-        "Visibility in catalog": "visible",
-        "Short description": parents["Short_Description"],
-        "Description": parents["Condensed_Description"],
-        "Tax status": "taxable",
-        "In stock?": 1,
-        "Stock": 1,
-        "Sold individually?": 1,
-        "Regular price": parents["SRP Pricing"].replace("[\\$,]", "", regex=True),
-        "Categories": "courses",
-        "Images": parents["Vendor"].map(brand_logo_map).fillna(""),
-        "Parent": "",
-        "Brands": parents["Vendor"],
-        # Attributes
-        "Attribute 1 name": "Date", "Attribute 1 value(s)": parents["Aggregated_Dates"], "Attribute 1 visible": "visible", "Attribute 1 global": 1,
-        "Attribute 2 name": "Location", "Attribute 2 value(s)": "Virtual", "Attribute 2 visible": "visible", "Attribute 2 global": 1,
-        "Attribute 3 name": "Time", "Attribute 3 value(s)": parents["Aggregated_Times"], "Attribute 3 visible": "visible", "Attribute 3 global": 1,
-        # Meta
-        "Meta: outline": parents["Formatted_Agenda"], "Meta: days": parents[col_dur], "Meta: location": "Virtual",
-        "Meta: overview": parents["Target Audience"], "Meta: objectives": parents["Formatted_Objectives"],
-        "Meta: prerequisites": parents["Formatted_Prerequisites"], "Meta: agenda": parents["Formatted_Agenda"],
-    })
-    # Child rows
-    woo_child = pd.DataFrame({
-        "Type": "variation, virtual",
-        "SKU": df_sorted[col_sid].astype(str).str.strip(),
-        "Name": df_sorted["Course Name"],
-        "Published": 1,
-        "Visibility in catalog": "visible",
-        "Short description": df_sorted["Short_Description"],
-        "Description": df_sorted["Condensed_Description"],
-        "Tax status": "taxable",
-        "In stock?": 1,
-        "Stock": 1,
-        "Sold individually?": 1,
-        "Regular price": df_sorted["SRP Pricing"].replace("[\\$,]", "", regex=True),
-        "Categories": "courses",
-        "Images": df_sorted["Vendor"].map(brand_logo_map).fillna(""),
-        "Parent": df_sorted["Course ID"],
-        "Brands": df_sorted["Vendor"],
-        "Attribute 1 name": "Date", "Attribute 1 value(s)": df_sorted["Date_fmt"], "Attribute 1 visible": "visible", "Attribute 1 global": 1,
-        "Attribute 2 name": "Location", "Attribute 2 value(s)": "Virtual", "Attribute 2 visible": "visible", "Attribute 2 global": 1,
-        "Attribute 3 name": "Time", "Attribute 3 value(s)": df_sorted.apply(lambda r: f"{r['Course Start Time']}-{r['Course End Time']} {r['Time Zone']}", axis=1), "Attribute 3 visible": "visible", "Attribute 3 global": 1,
-        "Meta: outline": df_sorted["Formatted_Agenda"], "Meta: days": df_sorted[col_dur], "Meta: location": "Virtual",
-        "Meta: overview": df_sorted["Target Audience"], "Meta: objectives": df_sorted["Formatted_Objectives"],
-        "Meta: prerequisites": df_sorted["Formatted_Prerequisites"], "Meta: agenda": df_sorted["Formatted_Agenda"],
-    })
-    # Combine & order
-    combined = pd.concat([woo_parent, woo_child], ignore_index=True)
-    column_order = [
-        "Type","SKU","Name","Published","Visibility in catalog","Short description","Description","Tax status","In stock?","Stock","Sold individually?","Regular price","Categories","Images","Parent","Brands", "Attribute 1 name","Attribute 1 value(s)","Attribute 1 visible","Attribute 1 global","Attribute 2 name","Attribute 2 value(s)","Attribute 2 visible","Attribute 2 global","Attribute 3 name","Attribute 3 value(s)","Attribute 3 visible","Attribute 3 global","Meta: outline","Meta: days","Meta: location","Meta: overview","Meta: objectives","Meta: prerequisites","Meta: agenda"
-    ]
-    combined = combined[column_order]
-    buf = BytesIO(); combined.to_csv(buf, index=False, encoding="utf-8-sig"); buf.seek(0); return buf
-
-# ---------------------------------------------------------------------------
-# Gradio wrapper
-# ---------------------------------------------------------------------------
 
-def process_file(upload):
-    return process_woocommerce_data_in_memory(upload)
 
 
-interface = gr.Interface(
     fn=process_file,
-    inputs=gr.File(label="Upload NetCom CSV", file_types=[".csv"]),
     outputs=gr.File(label="Download WooCommerce CSV"),
     title="NetCom → WooCommerce CSV Processor",
-    description="Upload a NetCom Reseller Schedule CSV to generate a WooCommerce-import CSV (Try 1 schema).",
     analytics_enabled=False,
 )
 
 if __name__ == "__main__":
     if not os.getenv("OPENAI_API_KEY"):
-        print("⚠️ OPENAI_API_KEY not set – AI paraphrasing will error out")
-    interface.launch()
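Both the removed code above and the new version of app.py that follows keep the same `_batch` fan-out helper: blank inputs are skipped so no GPT call is wasted, and every response is memoised in the `ai_response_cache/` disk cache. A minimal smoke test of that helper, with hypothetical course descriptions and not part of the commit, could look like this (it assumes app.py is importable from the working directory and OPENAI_API_KEY is set):

# Hypothetical smoke test for the shared _batch helper (not part of the commit).
import asyncio
from app import _batch   # same helper name in both versions of the module

samples = ["Learn AWS networking fundamentals.", "", "Automate deployments with Terraform."]  # hypothetical rows
summaries = asyncio.run(_batch(samples, "Create a concise 250-character summary of this course description:"))
assert len(summaries) == len(samples)  # results stay index-aligned with the input
assert summaries[1] == ""              # the blank cell never reaches the API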
 
 """NetCom → WooCommerce transformer (Try 1 schema)
 =================================================
+*Accept CSV **or** Excel schedule files and output the WooCommerce CSV.*
+
+Fixes vs last run
+-----------------
+* Output written to a **temporary file path** (Gradio BytesIO bug fixed).
+* **Excel upload** support.
+* **Pandas future-warning** silenced (`group_keys=False`).
 """
+
+# -------- Gradio bool-schema hot-patch --------------------------------------
 _original = gradio_client.utils._json_schema_to_python_type
 
 def _fixed_json_schema_to_python_type(schema, defs=None):
+    if isinstance(schema, bool):
         return "any"
     return _original(schema, defs)
 
 gradio_client.utils._json_schema_to_python_type = _fixed_json_schema_to_python_type  # type: ignore
 
+# -------- Tiny disk cache ----------------------------------------------------
 CACHE_DIR = Path("ai_response_cache"); CACHE_DIR.mkdir(exist_ok=True)
 
+def _cache_path(p: str):
+    return CACHE_DIR / f"{hashlib.md5(p.encode()).hexdigest()}.json"
 
+def _get_cached(p: str):
     try:
+        return json.loads(_cache_path(p).read_text("utf-8"))["response"]
     except Exception:
         return None
 
+def _set_cache(p: str, r: str):
     try:
+        _cache_path(p).write_text(json.dumps({"prompt": p, "response": r}), "utf-8")
     except Exception:
         pass
 
+# -------- Async GPT helpers --------------------------------------------------
+async def _gpt(client, prompt):
+    c = _get_cached(prompt)
+    if c is not None:
+        return c
     try:
+        msg = await client.chat.completions.create(model="gpt-4o-mini", messages=[{"role": "user", "content": prompt}], temperature=0)
+        text = msg.choices[0].message.content
     except Exception as e:
+        text = f"Error: {e}"
+    _set_cache(prompt, text)
+    return text
+
+async def _batch(lst, instr):
+    out = ["" for _ in lst]; idx,prompts=[],[]
+    for i,t in enumerate(lst):
+        if isinstance(t,str) and t.strip(): idx.append(i); prompts.append(f"{instr}\n\nText: {t}")
+    if not prompts: return out
     client = openai.AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+    res = await asyncio.gather(*[_gpt(client,p) for p in prompts])
+    for j,val in enumerate(res): out[idx[j]] = val
+    return out
+
+# -------- Core converter -----------------------------------------------------
+
+def _read(path: str):
+    return pd.read_excel(path) if path.lower().endswith((".xlsx",".xls")) else pd.read_csv(path, encoding="latin1")
+
+def convert(path: str) -> BytesIO:
+    logos = {"Amazon Web Services":"/wp-content/uploads/2025/04/aws.png","Cisco":"/wp-content/uploads/2025/04/cisco-e1738593292198-1.webp","Microsoft":"/wp-content/uploads/2025/04/Microsoft-e1737494120985-1.png","Google Cloud":"/wp-content/uploads/2025/04/Google_Cloud.png","EC Council":"/wp-content/uploads/2025/04/Ec_Council.png","ITIL":"/wp-content/uploads/2025/04/ITIL.webp","PMI":"/wp-content/uploads/2025/04/PMI.png","Comptia":"/wp-content/uploads/2025/04/Comptia.png","Autodesk":"/wp-content/uploads/2025/04/autodesk.png","ISC2":"/wp-content/uploads/2025/04/ISC2.png","AICerts":"/wp-content/uploads/2025/04/aicerts-logo-1.png"}
+    default_pre = "No specific prerequisites are required for this course. Basic computer literacy and familiarity with fundamental concepts in the subject area are recommended for the best learning experience."
+
+    df = _read(path); df.columns = df.columns.str.strip()
+    c = lambda *o: next((x for x in o if x in df.columns), None)
+    dcol, ocol, pcol, acol, dur, sid = c("Description","Decription"), c("Objectives","objectives"), c("RequiredPrerequisite","Required Pre-requisite"), c("Outline"), c("Duration"), c("Course SID","Course SID")
+    if dur is None: df["Duration"]=""; dur="Duration"
+
+    loop=asyncio.new_event_loop(); asyncio.set_event_loop(loop)
+    sdesc, ldesc, fobj, fout = loop.run_until_complete(asyncio.gather(
+        _batch(df.get(dcol,"").fillna("").tolist(), "Create a concise 250-character summary of this course description:"),
+        _batch(df.get(dcol,"").fillna("").tolist(), "Condense this description to a maximum of 750 characters in paragraph format, with clean formatting:"),
+        _batch(df.get(ocol,"").fillna("").tolist(), "Format these objectives into a bullet list with clean formatting. Start each bullet with '• ':"),
+        _batch(df.get(acol,"").fillna("").tolist(), "Format this agenda into a bullet list with clean formatting. Start each bullet with '• ':")))
+    loop.close()
+    fpre=[default_pre if not str(p).strip() else asyncio.run(_batch([p],"Format these prerequisites into a bullet list with clean formatting. Start each bullet with '• ':"))[0] for p in df.get(pcol,"").fillna("").tolist()]
+
+    df["Short_Description"],df["Condensed_Description"],df["Formatted_Objectives"],df["Formatted_Agenda"],df["Formatted_Prerequisites"] = sdesc,ldesc,fobj,fout,fpre
+
     df["Course Start Date"] = pd.to_datetime(df["Course Start Date"], errors="coerce")
     df["Date_fmt"] = df["Course Start Date"].dt.strftime("%-m/%-d/%Y")
+    dsorted=df.sort_values(["Course ID","Course Start Date"])
+    d_agg = dsorted.groupby("Course ID")["Date_fmt"].apply(lambda s: ",".join(s.dropna().unique())).reset_index(name="Dates")
+    t_agg = dsorted.groupby("Course ID",group_keys=False).apply(lambda g: ",".join(f"{st}-{et} {tz}" for st,et,tz in zip(g["Course Start Time"],g["Course End Time"],g["Time Zone"]))).reset_index(name="Times")
+    parents = dsorted.drop_duplicates("Course ID").merge(d_agg).merge(t_agg)
 
+    parent = pd.DataFrame({
+        "Type":"variable","SKU":parents["Course ID"],"Name":parents["Course Name"],"Published":1,"Visibility in catalog":"visible","Short description":parents["Short_Description"],"Description":parents["Condensed_Description"],"Tax status":"taxable","In stock?":1,"Stock":1,"Sold individually?":1,"Regular price":parents["SRP Pricing"].replace("[\\$,]","",regex=True),"Categories":"courses","Images":parents["Vendor"].map(logos).fillna(""),"Parent":"","Brands":parents["Vendor"],"Attribute 1 name":"Date","Attribute 1 value(s)":parents["Dates"],"Attribute 1 visible":"visible","Attribute 1 global":1,"Attribute 2 name":"Location","Attribute 2 value(s)":"Virtual","Attribute 2 visible":"visible","Attribute 2 global":1,"Attribute 3 name":"Time","Attribute 3 value(s)":parents["Times"],"Attribute 3 visible":"visible","Attribute 3 global":1,"Meta: outline":parents["Formatted_Agenda"],"Meta: days":parents[dur],"Meta: location":"Virtual","Meta: overview":parents["Target Audience"],"Meta: objectives":parents["Formatted_Objectives"],"Meta: prerequisites":parents["Formatted_Prerequisites"],"Meta: agenda":parents["Formatted_Agenda"]})
+    child = pd.DataFrame({
+        "Type":"variation, virtual","SKU":dsorted[sid].astype(str).str.strip(),"Name":dsorted["Course Name"],"Published":1,"Visibility in catalog":"visible","Short description":dsorted["Short_Description"],"Description":dsorted["Condensed_Description"],"Tax status":"taxable","In stock?":1,"Stock":1,"Sold individually?":1,"Regular price":dsorted["SRP Pricing"].replace("[\\$,]","",regex=True),"Categories":"courses","Images":dsorted["Vendor"].map(logos).fillna(""),"Parent":dsorted["Course ID"],"Brands":dsorted["Vendor"],"Attribute 1 name":"Date","Attribute 1 value(s)":dsorted["Date_fmt"],"Attribute 1 visible":"visible","Attribute 1 global":1,"Attribute 2 name":"Location","Attribute 2 value(s)":"Virtual","Attribute 2 visible":"visible","Attribute 2 global":1,"Attribute 3 name":"Time","Attribute 3 value(s)":dsorted.apply(lambda r:f"{r['Course Start Time']}-{r['Course End Time']} {r['Time Zone']}",axis=1),"Attribute 3 visible":"visible","Attribute 3 global":1,"Meta: outline":dsorted["Formatted_Agenda"],"Meta: days":dsorted[dur],"Meta: location":"Virtual","Meta: overview":dsorted["Target Audience"],"Meta: objectives":dsorted["Formatted_Objectives"],"Meta: prerequisites":dsorted["Formatted_Prerequisites"],"Meta: agenda":dsorted["Formatted_Agenda"]})
+
+    all_rows = pd.concat([parent,child],ignore_index=True)
+    order=["Type","SKU","Name","Published","Visibility in catalog","Short description","Description","Tax status","In stock?","Stock","Sold individually?","Regular price","Categories","Images","Parent","Brands","Attribute 1 name","Attribute 1 value(s)","Attribute 1 visible","Attribute 1 global","Attribute 2 name","Attribute 2 value(s)","Attribute 2 visible","Attribute 2 global","Attribute 3 name","Attribute 3 value(s)","Attribute 3 visible","Attribute 3 global","Meta: outline","Meta: days","Meta: location","Meta: overview","Meta: objectives","Meta: prerequisites","Meta: agenda"]
+    out=BytesIO(); all_rows[order].to_csv(out,index=False,encoding="utf-8-sig"); out.seek(0); return out
 
+# -------- Gradio wrappers ----------------------------------------------------
+
+def process_file(upload):
+    csv_bytes = convert(upload.name)
+    with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp:
+        tmp.write(csv_bytes.getvalue()); path = tmp.name
+    return path
 
+ui = gr.Interface(
     fn=process_file,
+    inputs=gr.File(label="Upload NetCom CSV / Excel", file_types=[".csv",".xlsx",".xls"]),
     outputs=gr.File(label="Download WooCommerce CSV"),
     title="NetCom → WooCommerce CSV Processor",
+    description="Upload NetCom schedule (.csv/.xlsx) to get the Try 1-formatted WooCommerce CSV.",
     analytics_enabled=False,
 )
 
 if __name__ == "__main__":
     if not os.getenv("OPENAI_API_KEY"):
+        print("⚠️ OPENAI_API_KEY not set – AI features will error")
+    ui.launch()
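
For a quick local check of the new conversion path without launching the Gradio UI, a short driver script is enough. This is a sketch, not part of the commit: the schedule filename is hypothetical, and it assumes the imports above the hunk (pandas, openai, gradio, tempfile) are installed and OPENAI_API_KEY is exported.

# Hypothetical local run of the updated converter (not part of the commit).
# "reseller_schedule.xlsx" stands in for a NetCom Reseller Schedule export; a .csv works the same way.
from app import convert

buf = convert("reseller_schedule.xlsx")            # Excel is now accepted alongside CSV
with open("woocommerce_import.csv", "wb") as fh:   # same bytes that process_file() writes to its temp path
    fh.write(buf.getvalue())

In the UI path, process_file() writes those bytes to a NamedTemporaryFile and returns the file path, which is the change that resolves the BytesIO download issue called out in the docstring.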