Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
from bs4 import BeautifulSoup
|
3 |
+
import streamlit as st
|
4 |
+
from urllib.parse import quote
|
5 |
+
|
6 |
+
def display_glossary_entity(k):
    """Render a one-line row of quick-search links for the term *k*.

    Emits a markdown line of the form ``**k** <small>[emoji](url) ...</small>``
    via ``st.markdown``; returns None.

    NOTE: the original decorated this with ``@st.cache_resource``, which caches
    the (None) return value per argument — on a cache hit the ``st.markdown``
    call is skipped and nothing renders. Removed, since this function's whole
    purpose is the render side effect.
    """
    # The original referenced PromptPrefix / PromptPrefix2 / PromptPrefix3,
    # which are defined nowhere in this file — a guaranteed NameError at call
    # time. Fall back to empty strings unless a caller defines them.
    prefix_analyst = str(globals().get("PromptPrefix", ""))
    prefix_pycoder = str(globals().get("PromptPrefix2", ""))
    prefix_jscoder = str(globals().get("PromptPrefix3", ""))
    search_urls = {
        "🚀🌌ArXiv": lambda k: f"/?q={quote(k)}",
        "🃏Analyst": lambda k: f"/?q={quote(k)}-{quote(prefix_analyst)}",
        "📚PyCoder": lambda k: f"/?q={quote(k)}-{quote(prefix_pycoder)}",
        "🔬JSCoder": lambda k: f"/?q={quote(k)}-{quote(prefix_jscoder)}",
        "📖Wiki": lambda k: f"https://en.wikipedia.org/wiki/{quote(k)}",
        "🔍Google": lambda k: f"https://www.google.com/search?q={quote(k)}",
        "🔎Bing": lambda k: f"https://www.bing.com/search?q={quote(k)}",
        "🎥YouTube": lambda k: f"https://www.youtube.com/results?search_query={quote(k)}",
        "🐦Twitter": lambda k: f"https://twitter.com/search?q={quote(k)}",
    }
    links_md = ' '.join(f"[{emoji}]({url(k)})" for emoji, url in search_urls.items())
    st.markdown(f"**{k}** <small>{links_md}</small>", unsafe_allow_html=True)
|
21 |
+
|
22 |
+
def perform_search(search_query, search_engine):
    """Scrape first-page results for *search_query* from one search engine.

    Parameters
    ----------
    search_query : str
        Raw user query; URL-encoded here before building the request URL
        (the original interpolated it verbatim, breaking on spaces/&/#).
    search_engine : str
        One of "Google", "Bing", "Wikipedia", "Twitter". Any other value
        returns an empty list (the original left ``search_results`` unbound
        and raised UnboundLocalError).

    Returns
    -------
    list[dict]
        Dicts with "title" and "link" keys, one per scraped result anchor.
        May be empty if the engine blocks scraping or markup changed.

    NOTE(review): Twitter search pages are rendered client-side by JS, so
    the ".css-4rbku5" selector likely matches nothing in the static HTML —
    kept for parity with the original, but verify.
    """
    # (url template, CSS selector for result anchors) per engine.
    engines = {
        "Google": ("https://www.google.com/search?q={}", ".yuRUbf a"),
        "Bing": ("https://www.bing.com/search?q={}", ".b_algo h2 a"),
        "Wikipedia": ("https://en.wikipedia.org/w/index.php?search={}", ".mw-search-result-heading a"),
        "Twitter": ("https://twitter.com/search?q={}", ".css-4rbku5"),
    }
    if search_engine not in engines:
        return []

    url_template, selector = engines[search_engine]
    url = url_template.format(quote(search_query))
    # Browser-like UA: Google/Bing serve a stripped or blocked page to the
    # default python-requests agent, so the selectors above find nothing.
    # timeout prevents the Streamlit app hanging forever on a stalled request.
    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"}
    response = requests.get(url, headers=headers, timeout=10)
    soup = BeautifulSoup(response.text, "html.parser")

    results = []
    for result in soup.select(selector):
        link = result.get("href")
        if not link:
            # Anchors without href would have raised KeyError; skip them.
            continue
        if search_engine == "Twitter":
            # Twitter result hrefs are site-relative.
            link = f"https://twitter.com{link}"
        results.append({"title": result.text, "link": link})
    return results
|
53 |
+
|
54 |
+
def main():
    """Streamlit entry point: query box plus one results column per engine.

    Renders a text input, an optional glossary-links row, and four columns
    (Google, Bing, Wikipedia, Twitter), each with a button that scrapes and
    lists that engine's results via ``perform_search``.
    """
    st.set_page_config(page_title="Web Search App", page_icon=":mag:", layout="wide")

    st.title("Web Search App")
    st.write("Search Google, Bing, Wikipedia, and Twitter simultaneously!")

    search_query = st.text_input("Enter your search query")
    if search_query:
        # Skip the glossary row on first load / empty query; the original
        # rendered a junk "** **" markdown line before anything was typed.
        display_glossary_entity(search_query)

    # The original repeated this column block four times verbatim; drive it
    # from a list instead. Header and button strings are identical to the
    # originals ("<Engine> Search Results" / "Search <Engine>").
    engines = ["Google", "Bing", "Wikipedia", "Twitter"]
    for col, engine in zip(st.columns(len(engines)), engines):
        with col:
            st.header(f"{engine} Search Results")
            if st.button(f"Search {engine}"):
                for result in perform_search(search_query, engine):
                    st.write(f"[{result['title']}]({result['link']})")
|
92 |
+
|
93 |
+
# Run the app only when this file is executed directly (e.g. via
# `streamlit run app.py`), not when imported as a module.
if __name__ == "__main__":
    main()
|