Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,26 +1,49 @@
|
|
1 |
import requests
|
2 |
from bs4 import BeautifulSoup
|
3 |
import streamlit as st
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
4 |
|
5 |
def perform_search(search_query, search_engine):
    """Fetch the first page of results from Google or Bing and parse it.

    Args:
        search_query: Raw user query string (interpolated into the URL).
        search_engine: Either "Google" or "Bing".

    Returns:
        A list of {"title": str, "link": str} dicts; empty for an
        unrecognized engine.

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    if search_engine == "Google":
        url = f"https://www.google.com/search?q={search_query}"
    elif search_engine == "Bing":
        # BUG FIX: the original only assigned `url` in the Google branch,
        # so a Bing search crashed with NameError on requests.get(url).
        url = f"https://www.bing.com/search?q={search_query}"
    else:
        # Unknown engine: previously fell through to an unbound `url`;
        # return no results instead of crashing.
        return []

    # timeout so a hung search page cannot block the app indefinitely
    response = requests.get(url, timeout=10)
    soup = BeautifulSoup(response.text, "html.parser")

    # Extract search results based on the search engine.
    # NOTE(review): these CSS selectors track each site's current markup
    # and are brittle — verify they still match live pages.
    if search_engine == "Google":
        search_results = soup.select(".yuRUbf a")
    else:  # Bing (only two engines reach this point)
        search_results = soup.select(".b_algo h2 a")

    results = []
    for result in search_results:
        title = result.text
        link = result["href"]
        results.append({"title": title, "link": link})

    return results
|
@@ -29,11 +52,12 @@ def main():
|
|
29 |
st.set_page_config(page_title="Web Search App", page_icon=":mag:", layout="wide")
|
30 |
|
31 |
st.title("Web Search App")
|
32 |
-
st.write("Search Google and
|
33 |
|
34 |
search_query = st.text_input("Enter your search query")
|
|
|
35 |
|
36 |
-
col1, col2 = st.columns(
|
37 |
|
38 |
with col1:
|
39 |
st.header("Google Search Results")
|
@@ -48,6 +72,20 @@ def main():
|
|
48 |
bing_results = perform_search(search_query, "Bing")
|
49 |
for result in bing_results:
|
50 |
st.write(f"[{result['title']}]({result['link']})")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
51 |
|
52 |
if __name__ == "__main__":
|
53 |
main()
|
|
|
1 |
import requests
|
2 |
from bs4 import BeautifulSoup
|
3 |
import streamlit as st
|
4 |
+
from urllib.parse import quote
|
5 |
+
|
6 |
+
def display_glossary_entity(k):
    """Render a markdown row of quick-search links for the term *k*.

    BUG FIX: the original wrapped this in @st.cache_resource, but the
    function's entire value is its st.markdown() side effect (it returns
    None, which is not a shareable resource). With caching, a rerun that
    passes the same term skips the body and the links silently stop
    rendering — so the decorator is removed.

    Args:
        k: The search term to link out to the various engines.
    """
    # Nothing useful to render for an empty/blank term.
    if not k:
        return
    # Emoji label -> callable that builds that engine's search URL;
    # quote() makes the term safe to embed in a URL.
    search_urls = {
        "🚀🌌ArXiv": lambda k: f"https://arxiv.org/search/?query={quote(k)}&searchtype=all&source=header",
        "📖Wiki": lambda k: f"https://en.wikipedia.org/wiki/{quote(k)}",
        "🔍Google": lambda k: f"https://www.google.com/search?q={quote(k)}",
        "🔎Bing": lambda k: f"https://www.bing.com/search?q={quote(k)}",
        "🎥YouTube": lambda k: f"https://www.youtube.com/results?search_query={quote(k)}",
        "🐦Twitter": lambda k: f"https://twitter.com/search?q={quote(k)}",
    }
    links_md = ' '.join(f"[{emoji}]({url(k)})" for emoji, url in search_urls.items())
    # unsafe_allow_html is needed for the <small> wrapper around the links.
    st.markdown(f"**{k}** <small>{links_md}</small>", unsafe_allow_html=True)
|
18 |
|
19 |
def perform_search(search_query, search_engine):
    """Scrape the first page of results from a supported search engine.

    Args:
        search_query: Raw user query string; URL-encoded here before use.
        search_engine: One of "Google", "Bing", "Wikipedia", "Twitter".

    Returns:
        A list of {"title": str, "link": str} dicts; empty for an
        unrecognized engine or when the page yields no matches.

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    # engine -> (results-page URL template, CSS selector for result anchors).
    # NOTE(review): these selectors track each site's current markup and are
    # brittle; Twitter in particular renders results via JavaScript, so the
    # static HTML fetched here may well contain no ".css-4rbku5" matches.
    engines = {
        "Google": ("https://www.google.com/search?q={q}", ".yuRUbf a"),
        "Bing": ("https://www.bing.com/search?q={q}", ".b_algo h2 a"),
        "Wikipedia": ("https://en.wikipedia.org/w/index.php?search={q}", ".mw-search-result-heading a"),
        "Twitter": ("https://twitter.com/search?q={q}", ".css-4rbku5"),
    }
    if search_engine not in engines:
        # BUG FIX: an unknown engine previously left `search_results`
        # unbound and crashed with UnboundLocalError below.
        return []

    url_template, selector = engines[search_engine]
    # BUG FIX: quote() the query so spaces/special characters form a valid
    # URL (the file already imports quote and uses it elsewhere).
    url = url_template.format(q=quote(search_query))
    # timeout so a hung search page cannot block the app indefinitely
    response = requests.get(url, timeout=10)
    soup = BeautifulSoup(response.text, "html.parser")
    search_results = soup.select(selector)

    results = []
    for result in search_results:
        title = result.text
        link = result["href"]
        if search_engine == "Twitter":
            # Twitter anchors are site-relative; prefix the host.
            link = f"https://twitter.com{link}"
        results.append({"title": title, "link": link})

    return results
|
|
|
52 |
st.set_page_config(page_title="Web Search App", page_icon=":mag:", layout="wide")
|
53 |
|
54 |
st.title("Web Search App")
|
55 |
+
st.write("Search Google, Bing, Wikipedia, and Twitter simultaneously!")
|
56 |
|
57 |
search_query = st.text_input("Enter your search query")
|
58 |
+
display_glossary_entity(search_query)
|
59 |
|
60 |
+
col1, col2, col3, col4 = st.columns(4)
|
61 |
|
62 |
with col1:
|
63 |
st.header("Google Search Results")
|
|
|
72 |
bing_results = perform_search(search_query, "Bing")
|
73 |
for result in bing_results:
|
74 |
st.write(f"[{result['title']}]({result['link']})")
|
75 |
+
|
76 |
+
with col3:
|
77 |
+
st.header("Wikipedia Search Results")
|
78 |
+
if st.button("Search Wikipedia"):
|
79 |
+
wikipedia_results = perform_search(search_query, "Wikipedia")
|
80 |
+
for result in wikipedia_results:
|
81 |
+
st.write(f"[{result['title']}]({result['link']})")
|
82 |
+
|
83 |
+
with col4:
|
84 |
+
st.header("Twitter Search Results")
|
85 |
+
if st.button("Search Twitter"):
|
86 |
+
twitter_results = perform_search(search_query, "Twitter")
|
87 |
+
for result in twitter_results:
|
88 |
+
st.write(f"[{result['title']}]({result['link']})")
|
89 |
|
90 |
# Entry point: launch the Streamlit app when run as a script
# (e.g. `streamlit run app.py`), not when imported as a module.
if __name__ == "__main__":
    main()
|