# NLP Toolkit — Streamlit app.
# (Recovered from a Hugging Face Spaces file view: ~2,157 bytes, commit 99cae8f.)
import streamlit as st
import requests
import time
# Hugging Face API key from secrets
# Token is read from Streamlit's secrets store (.streamlit/secrets.toml or the
# deployment's secret settings); raises KeyError at startup if not configured.
API_KEY = st.secrets["API_KEY"]
HEADERS = {"Authorization": f"Bearer {API_KEY}"}
# Hosted Inference API endpoints, keyed by task name used elsewhere in the app.
API_URLS = {
"Summarizer": "https://api-inference.huggingface.co/models/facebook/bart-large-cnn",
"Sentiment": "https://api-inference.huggingface.co/models/finiteautomata/bertweet-base-sentiment-analysis"
}
def query(api_url, payload):
    """POST *payload* to a Hugging Face Inference API endpoint.

    Args:
        api_url: Full model endpoint URL (one of the API_URLS values).
        payload: JSON-serializable request body, e.g. ``{"inputs": text}``.

    Returns:
        The decoded JSON response on success, or ``{"error": "..."}`` on any
        HTTP, network, or JSON-decoding failure — callers check for the
        ``"error"`` key instead of catching exceptions.
    """
    try:
        res = requests.post(api_url, headers=HEADERS, json=payload, timeout=60)
        if res.status_code != 200:
            return {"error": f"HTTP {res.status_code}: {res.text}"}
        return res.json()
    except (requests.RequestException, ValueError) as e:
        # Narrowed from bare Exception: covers connection errors, timeouts,
        # and malformed JSON bodies (requests' JSONDecodeError is a ValueError)
        # without masking programming errors.
        return {"error": str(e)}
# Page chrome: title, icon, centered single-column layout, and the two task tabs.
# NOTE(review): the original emoji were mojibake (UTF-8 decoded as cp1253,
# e.g. "π§ " for 🧠); restored to plausible glyphs — confirm against the
# original deployment if exact icons matter.
st.set_page_config(page_title="NLP Toolkit", page_icon="🧠", layout="centered")
st.title("🧠 AI NLP Toolkit")
st.write("Summarization & Sentiment Analysis using Hugging Face APIs 🚀")
tab1, tab2 = st.tabs(["📄 Summarizer", "💬 Sentiment Analysis"])
with tab1:
    # Summarizer tab: free-text input -> BART summary via the Inference API.
    text = st.text_area("Enter text to summarize:", height=200)
    if st.button("Summarize"):
        if not text.strip():
            st.warning("❌ Please enter some text.")
        else:
            with st.spinner("Generating summary..."):
                time.sleep(1)  # brief pause so the spinner is visible
                res = query(API_URLS["Summarizer"], {"inputs": text})
            if "error" in res:
                st.error(res["error"])
            else:
                # Successful responses are a list: [{"summary_text": ...}].
                st.success("✅ Summary Generated")
                st.write(res[0]['summary_text'])
with tab2:
    # Sentiment tab: free-text input -> BERTweet label/score list.
    # key= keeps this text_area's state separate from the summarizer tab's.
    text = st.text_area("Enter text for sentiment analysis:", height=200, key="sent_text")
    if st.button("Analyze Sentiment"):
        if not text.strip():
            st.warning("❌ Please enter some text.")
        else:
            with st.spinner("Analyzing sentiment..."):
                time.sleep(1)  # brief pause so the spinner is visible
                res = query(API_URLS["Sentiment"], {"inputs": text})
            if "error" in res:
                st.error(res["error"])
            else:
                st.success("✅ Sentiment Analysis Complete")
                # Response shape: [[{"label": ..., "score": ...}, ...]].
                # NOTE(review): separator was mojibake in the scrape; "→" is a
                # best guess — confirm against the original deployment.
                for item in res[0]:
                    st.write(f"**{item['label']}** → {item['score']:.2f}")