import streamlit as st
from google.cloud import language_v1

# Your existing function (replace this part with your actual code)
def sample_analyze_entities(text_content):
    st.write("Debug: Entered sample_analyze_entities")
    try:
        # The client picks up credentials from the environment
        # (e.g. GOOGLE_APPLICATION_CREDENTIALS).
        client = language_v1.LanguageServiceClient()
        type_ = language_v1.Document.Type.PLAIN_TEXT
        language = "en"
        document = {"content": text_content, "type_": type_, "language": language}
        encoding_type = language_v1.EncodingType.UTF8

        st.write("Debug: Making API call...")
        response = client.analyze_entities(request={"document": document, "encoding_type": encoding_type})
        st.write("Debug: API call completed.")

        # Report each detected entity with its type and salience score.
        for entity in response.entities:
            st.write(f"Entity: {entity.name}, Type: {language_v1.Entity.Type(entity.type_).name}, Salience: {entity.salience}")
    except Exception as e:
        st.write(f"Debug: An error occurred: {e}")

# Streamlit UI
st.title('Google Cloud NLP Entity Analyzer')
user_input = st.text_area('Enter text to analyze', '')

if st.button('Analyze'):
    st.write("Debug: Analyze button clicked")
    if user_input:
        st.write(f"Debug: User input received: {user_input}")
        sample_analyze_entities(user_input)
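
# Usage sketch (assuming this script is saved as app.py and Google Cloud
# credentials are already configured for the environment):
#   streamlit run app.py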