import gradio as gr
import requests
import bs4

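# One-level site crawler: link_find() scrapes the anchors from one page,
# sitemap() resolves relative hrefs and expands each link one level deep,
# and a small Gradio Blocks UI returns the crawl as JSON.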
def link_find(url):
    """Fetch `url` and return a record for every anchor tag on the page."""
    out = []
    # A timeout keeps one unresponsive site from hanging the whole request.
    source = requests.get(url, timeout=10)
    if source.status_code == 200:
        soup = bs4.BeautifulSoup(source.content, "html.parser")
        # Record every <a> tag; TREE is filled in later by sitemap().
        for a in soup.find_all("a"):
            out.append(
                {
                    "LINK TITLE": a.get("title"),
                    "URL": a.get("href"),
                    "STRING": a.string,
                    "TREE": [],
                }
            )
    # A non-200 response falls through and returns an empty list.
    return out


def sitemap(url):
    """Crawl one level deep: fetch the page's links, then each link's links."""
    if url:
        link1 = link_find(url)
        for i, ea in enumerate(link1):
            print(ea)
            try:
                # Relative hrefs (e.g. "/about") need the scheme and host re-attached.
                if ea["URL"] and not ea["URL"].startswith("http"):
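                    # A standard-library alternative would be
                    # urllib.parse.urljoin(url, ea["URL"]), which also covers
                    # protocol-relative hrefs; the manual split below keeps
                    # the original approach.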
                    scheme = url.split("//")[0]
                    host = url.split("//")[1].split("/")[0]
                    base = f"{scheme}//{host}"
                    print(base)
                    link1[i]["TREE"] = link_find(f"{base}{ea['URL']}")
            except Exception as e:
                print(e)
        return link1
    # Nothing to crawl when no URL was submitted.
    return []


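# Minimal Gradio front end: a URL box, a button, and a JSON view of the crawl.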
with gr.Blocks() as app:
    inp = gr.Textbox(label="URL")
    btn = gr.Button("Crawl")
    outp = gr.JSON()
    btn.click(sitemap, inp, outp)

app.launch()