import spacy
import requests
import wikipedia
import gradio as gr
# 1) Load spaCy small English model (make sure to add en_core_web_sm in requirements.txt)
nlp = spacy.load("en_core_web_sm")
# 2) Helper: Overpass query for POIs
def fetch_osm(lat, lon, osm_filter, limit=5):
    overpass = """
    [out:json][timeout:25];
    (
      node{filt}(around:1000,{lat},{lon});
      way{filt}(around:1000,{lat},{lon});
      rel{filt}(around:1000,{lat},{lon});
    );
    out center {lim};
    """.format(filt=osm_filter, lat=lat, lon=lon, lim=limit)
    r = requests.post("https://overpass-api.de/api/interpreter", data={"data": overpass})
    elems = r.json().get("elements", [])
    results = []
    for el in elems:
        name = el.get("tags", {}).get("name")
        if name:
            results.append({"name": name, **({"info": el["tags"].get("cuisine")} if "cuisine" in el["tags"] else {})})
    return results
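
# Illustrative sketch of how fetch_osm is meant to be called; the coordinates,
# filter, and result shape below are examples only, and live Overpass responses
# will vary (and require network access):
#   fetch_osm(52.52, 13.405, '["amenity"="restaurant"]', limit=3)
#   -> [{"name": "Some Restaurant", "info": "italian"}, {"name": "Another Place"}, ...]
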
# 3) Geocode via Nominatim
def geocode(place: str):
    r = requests.get(
        "https://nominatim.openstreetmap.org/search",
        params={"q": place, "format": "json", "limit": 1},
        headers={"User-Agent": "iVoiceContext/1.0"},
    )
    data = r.json()
    if not data:
        return None
    return float(data[0]["lat"]), float(data[0]["lon"])
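
# Example usage (illustrative; Nominatim results can change over time and the
# request needs network access). A successful lookup returns a (lat, lon) tuple
# of floats, e.g. roughly (48.86, 2.35) for "Paris"; an unknown place returns None:
#   geocode("Paris")
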
# 4) Main context extractor
def get_context(text):
    doc = nlp(text)
    out = {}
    # gather unique entities of interest
    for ent in {e.text for e in doc.ents if e.label_ in ("GPE", "LOC", "PERSON", "ORG")}:
        label = next(e.label_ for e in doc.ents if e.text == ent)
        if label in ("GPE", "LOC"):
            geo = geocode(ent)
            if not geo:
                out[ent] = {"type": "location", "error": "could not geocode"}
            else:
                lat, lon = geo
                out[ent] = {
                    "type": "location",
                    "restaurants": fetch_osm(lat, lon, '["amenity"="restaurant"]'),
                    "attractions": fetch_osm(lat, lon, '["tourism"="attraction"]'),
                }
        else:  # PERSON or ORG
            try:
                summ = wikipedia.summary(ent, sentences=2)
            except Exception:
                summ = "No summary available"
            out[ent] = {"type": "wiki", "summary": summ}
    if not out:
        return {"error": "no named entities found"}
    return out
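
# Illustrative call, assuming the en_core_web_sm model is installed and the
# external APIs are reachable; the exact entities, POIs, and summaries depend
# on the model and on live data:
#   get_context("I read about Ada Lovelace while visiting Berlin")
#   might return something shaped like:
#   {
#     "Ada Lovelace": {"type": "wiki", "summary": "..."},
#     "Berlin": {"type": "location", "restaurants": [...], "attractions": [...]}
#   }
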
# 5) Gradio interface
iface = gr.Interface(
    fn=get_context,
    inputs=gr.Textbox(lines=3, placeholder="Enter or paste your translated text…"),
    outputs="json",
    title="iVoice Context-Aware API",
    description="Extracts people, places, orgs from text and returns nearby POIs or Wikipedia summaries.",
)

if __name__ == "__main__":
    iface.launch()
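
# To run this file locally (assuming the dependencies are installed, e.g.
# `pip install spacy requests wikipedia gradio` plus the language model via
# `python -m spacy download en_core_web_sm`), execute it with Python;
# Gradio serves the UI on http://127.0.0.1:7860 by default.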