import os
import json
import re
import gradio as gr
import requests
# Hugging Face API details
API_URL = "https://api-inference.huggingface.co/models/numind/NuExtract-1.5"
api_token = os.environ.get("HF_TOKEN", "") # Get token from environment variable
headers = {"Authorization": f"Bearer {api_token}"}
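# POST a generation request to the hosted NuExtract model; returns the decoded
# JSON response, or an {"error": ...} dict if the request or decoding fails.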
def query_api(payload):
    try:
        response = requests.post(API_URL, headers=headers, json=payload)
        print("API STATUS CODE:", response.status_code)
        print("RAW RESPONSE:", response.text)
        return response.json()
    except Exception as e:
        return {"error": f"Request failed: {str(e)}"}
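# Build the NuExtract prompt (template + source text, answer expected after <|output|>),
# call the API, and return three values for the UI: a status string, the extracted
# JSON (pretty-printed when parseable), and a short HTML info snippet.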
def extract_structure(template, text):
    try:
        prompt = f"<|input|>\n### Template:\n{template}\n### Text:\n{text}\n\n<|output|>"
        payload = {
            "inputs": prompt,
            "parameters": {
                "max_new_tokens": 2000,
                "temperature": 0.01,
                "return_full_text": True
            }
        }
        response = query_api(payload)

        if isinstance(response, dict) and "error" in response:
            return f"API Error: {response['error']}", "{}", f"<p>Error: {response['error']}</p>"

        if isinstance(response, list) and len(response) > 0:
            output = response[0].get("generated_text", "")
            print("Generated Text:", output)

            if "<|output|>" in output:
                result = output.split("<|output|>")[-1].strip()
            else:
                match = re.search(r'({[\s\S]+})', output)
                result = match.group(1) if match else output.strip()

            try:
                parsed = json.loads(result)
                result = json.dumps(parsed, indent=2)
            except Exception:
                pass

            highlighted = f"<p>✅ Successfully processed input of length {len(text)} characters.</p>"
            return "✅ Extraction Complete", result, highlighted

        return "⚠️ Unexpected API Response", json.dumps(response, indent=2), "<p>Unexpected format.</p>"
    except Exception as e:
        return f"❌ Error: {str(e)}", "{}", f"<p>Processing failed: {str(e)}</p>"
# Gradio App
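# Layout: template and source-text inputs on the left; status, extracted JSON,
# and an info HTML panel on the right.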
with gr.Blocks() as demo:
    gr.Markdown("# 🧠 NuExtract-1.5 Information Extractor")

    if not api_token:
        gr.Markdown("## ⚠️ No API token found. Please set `HF_TOKEN` in environment variables.")

    with gr.Row():
        with gr.Column():
            template_input = gr.Textbox(
                label="Template (JSON)",
                value='{"name": "", "email": ""}',
                lines=5
            )
            text_input = gr.Textbox(
                label="Input Text",
                value="Contact: John Smith ([email protected])",
                lines=10
            )
            submit_btn = gr.Button("Extract Information")

        with gr.Column():
            progress_output = gr.Textbox(label="Progress")
            result_output = gr.Textbox(label="Extracted Information")
            html_output = gr.HTML(label="Info")

    submit_btn.click(
        fn=extract_structure,
        inputs=[template_input, text_input],
        outputs=[progress_output, result_output, html_output]
    )
    gr.Examples(
        [
            [
                '{"name": "", "email": ""}',
                'Contact: John Smith ([email protected])'
            ],
            [
                '''{
    "Model": {
        "Name": "",
        "Number of parameters": "",
        "Architecture": []
    },
    "Usage": {
        "Use case": [],
        "License": ""
    }
}''',
                '''We introduce Mistral 7B, a 7-billion-parameter language model engineered for superior performance and efficiency. Mistral 7B outperforms the best open 13B model (Llama 2) across all evaluated benchmarks, and the best released 34B model (Llama 1) in reasoning, mathematics, and code generation. Our model is released under the Apache 2.0 license.'''
            ]
        ],
        [template_input, text_input]
    )
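# Startup probe: send a minimal request so a missing token or an unexpected
# API response format shows up in the logs before the app is used.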
def test_api_connection():
    print("===== Application Startup =====")
    if not api_token:
        print("❌ HF_TOKEN not set. Please set your API token.")
    else:
        test_payload = {
            "inputs": "<|input|>\n### Template:\n{\"test\": \"\"}\n### Text:\nHello world\n\n<|output|>",
            "parameters": {
                "max_new_tokens": 100,
                "temperature": 0.01
            }
        }
        response = query_api(test_payload)
        if isinstance(response, list):
            print("✅ Connection to Hugging Face API successful!")
        else:
            print("⚠️ API may not be returning expected format:", response)
if __name__ == "__main__":
    test_api_connection()
    demo.launch(debug=True)  # You can add share=True or server_name/port if needed