import logging

from fastapi import FastAPI, HTTPException, Query
from playwright.async_api import async_playwright

# Application and logger used by the route below
logger = logging.getLogger(__name__)
app = FastAPI()


@app.get("/search_leads")
async def search_leads(
    query: str = Query(..., description="Search term for business leads")
):
    logger.info(f"Searching Google Maps for: {query}")

    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        page = await browser.new_page()

        try:
            # Go to Google Maps
            await page.goto("https://www.google.com/maps", wait_until="networkidle")

            # Accept cookies if present (optional, depends on region)
            try:
                await page.click('button[aria-label="Accept all"]', timeout=3000)
            except Exception:
                pass

            # Type the query into the search box and click the search button
            await page.fill('input#searchboxinput', query)
            await page.click('button#searchbox-searchbutton')

            # Wait for search results to load - selector for listings container
            await page.wait_for_selector('div[role="article"]', timeout=10000)

            # Scroll the results container to load more items (optional);
            # for now, scrape only the visible ones (see the commented sketch below)
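            # A possible way to load more listings before scraping (a sketch, left
            # commented out; it assumes the scrollable results panel is the element
            # with role="feed", which may change as Google Maps updates its markup):
            #
            # for _ in range(5):
            #     await page.evaluate("""
            #         () => {
            #             const feed = document.querySelector('div[role="feed"]');
            #             if (feed) feed.scrollBy(0, feed.scrollHeight);
            #         }
            #     """)
            #     await page.wait_for_timeout(1000)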
            # Extract data from listings
            results = await page.evaluate("""
                () => {
                    const listings = [];
                    const elements = document.querySelectorAll('div[role="article"]');
                    elements.forEach(el => {
                        const nameEl = el.querySelector('h3 span');
                        const name = nameEl ? nameEl.innerText : null;

                        const addressEl = el.querySelector('[data-tooltip="Address"]');
                        const address = addressEl ? addressEl.innerText : null;

                        const phoneEl = el.querySelector('button[data-tooltip="Copy phone number"]');
                        const phone = phoneEl ? phoneEl.getAttribute('aria-label')?.replace('Copy phone number ', '') : null;

                        const websiteEl = el.querySelector('a[aria-label*="Website"]');
                        const website = websiteEl ? websiteEl.href : null;

                        listings.push({name, address, phone, website});
                    });
                    return listings;
                }
            """)

            await browser.close()

            # Filter out empty entries
            filtered = [r for r in results if r['name']]

            return {"query": query, "results_count": len(filtered), "results": filtered}

        except Exception as e:
            await browser.close()
            logger.error(f"Error during Google Maps search scraping: {str(e)}")
            raise HTTPException(status_code=500, detail=f"Search scraping error: {str(e)}")
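

# Example usage (a sketch; assumes this module is saved as main.py and that
# uvicorn and the Playwright Chromium browser are installed):
#
#   uvicorn main:app --host 0.0.0.0 --port 8000
#   curl "http://localhost:8000/search_leads?query=coffee+shops+in+Austin"
#
# The endpoint responds with JSON shaped like:
#   {"query": "...", "results_count": 2,
#    "results": [{"name": "...", "address": "...", "phone": "...", "website": "..."}]}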