oberbics committed on
Commit 19377ce · verified · 1 Parent(s): 62bfa62

Update app.py

Files changed (1)
  1. app.py +9 -31
app.py CHANGED
@@ -26,16 +26,7 @@ def load_model():
         print(f"❌ Model loading failed: {str(e)}")
         return None
 
-# 2. Warm Start Mechanism
-def keep_model_warm():
-    """Periodic ping to prevent Hugging Face from unloading the model"""
-    if extractor:
-        try:
-            extractor("ping", max_length=1)
-        except:
-            pass
-
-# 3. Processing Function with Streamed Output
+# 2. Processing Function with Streamed Output
 def extract_structure(template, text):
     # Input validation
     if not text.strip():
@@ -71,7 +62,7 @@ def extract_structure(template, text):
         )[0]['generated_text']
 
         # Format output
-        formatted_json = json.dumps(json.loads(result), indent=2)
+        formatted_json = json.loads(result)  # Parse to validate JSON
         html_output = f"""
         <div style='
             padding: 15px;
@@ -81,7 +72,7 @@ def extract_structure(template, text):
             margin-top: 10px;
         '>
         <h3 style='margin-top:0'>Extracted Data</h3>
-        <pre style='white-space: pre-wrap'>{formatted_json}</pre>
+        <pre style='white-space: pre-wrap'>{json.dumps(formatted_json, indent=2)}</pre>
         </div>
         """
 
@@ -91,7 +82,7 @@ def extract_structure(template, text):
         error_msg = f"❌ Processing error: {str(e)}"
         yield error_msg, "", f"<p style='color:red'>{error_msg}</p>"
 
-# 4. Gradio Interface
+# 3. Gradio Interface
 with gr.Blocks(theme=gr.themes.Soft(), title="NuExtract 1.5") as demo:
     # Header
     gr.Markdown("""
@@ -136,13 +127,9 @@ with gr.Blocks(theme=gr.themes.Soft(), title="NuExtract 1.5") as demo:
             gr.Markdown("### 📤 Results")
             status = gr.Textbox(
                 label="Status",
-                value="🟢 System Ready",
-                interactive=False
-            )
-            json_output = gr.JSON(
-                label="Structured Output",
-                interactive=False
+                value="🟢 System Ready"
             )
+            json_output = gr.JSON(label="Structured Output")  # Removed interactive parameter
             html_output = gr.HTML(
                 label="Formatted View",
                 value="<div style='min-height:200px'></div>"
@@ -160,28 +147,19 @@ with gr.Blocks(theme=gr.themes.Soft(), title="NuExtract 1.5") as demo:
     )
 
     clear_btn.click(
-        fn=lambda: ["", "", "", "<div></div>"],
+        fn=lambda: ["", "", {}, "<div></div>"],
         inputs=[],
         outputs=[template_input, text_input, json_output, html_output]
     )
 
-# 5. Launch Configuration
+# 4. Launch Configuration
 if __name__ == "__main__":
     # Initialize model
     extractor = load_model()
 
-    # Start keep-alive thread
-    import threading
-    threading.Thread(
-        target=lambda: [keep_model_warm() for _ in iter(int, 1)],
-        daemon=True
-    ).start()
-
     # Launch app
     demo.launch(
         server_name="0.0.0.0",
         server_port=7860,
-        show_error=True,
-        share=False,
-        favicon_path="https://huggingface.co/favicon.ico"
+        show_error=True
     )
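
For context, the new output-formatting path parses the model's generated text into a Python object before pretty-printing it for the HTML view, so malformed JSON fails fast instead of being dumped verbatim. A minimal standalone sketch of that pattern (the sample result string below is hypothetical, not taken from the app):

import json

result = '{"name": "NuExtract", "version": "1.5"}'  # hypothetical stand-in for generated_text
formatted_json = json.loads(result)                 # parse first; invalid JSON raises here
print(json.dumps(formatted_json, indent=2))         # pretty-print only for display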