Update open_google_results.py (#7085)
* update crawl_google_results.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update and rename crawl_google_results.py to open_google_results.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Create crawl_google_results.py

* Update web_programming/open_google_results.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update open_google_results.py

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Christian Clauss <cclauss@me.com>
3 people authored Oct 13, 2022
1 parent d5a9f64 commit f176786
Showing 1 changed file with 42 additions and 0 deletions.
42 changes: 42 additions & 0 deletions web_programming/open_google_results.py
@@ -0,0 +1,42 @@
import webbrowser
from sys import argv
from urllib.parse import parse_qs, quote

import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

if __name__ == "__main__":
    if len(argv) > 1:
        query = "%20".join(argv[1:])
    else:
        query = quote(str(input("Search: ")))

    print("Googling.....")

    url = f"https://www.google.com/search?q={query}&num=100"

    res = requests.get(
        url,
        headers={
            "User-Agent": str(UserAgent().random)
        },
    )

    try:
        link = (
            BeautifulSoup(res.text, "html.parser")
            .find("div", attrs={"class": "yuRUbf"})
            .find("a")
            .get("href")
        )

    except AttributeError:
        link = parse_qs(
            BeautifulSoup(res.text, "html.parser")
            .find("div", attrs={"class": "kCrYT"})
            .find("a")
            .get("href")
        )["url"][0]

    webbrowser.open(link)
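The script builds a Google search URL from the query, fetches it with a randomized User-Agent header, extracts the first result link with BeautifulSoup (falling back to the kCrYT result layout if the yuRUbf selector is not found), and opens that link in the default browser. A minimal usage sketch, assuming the requests, beautifulsoup4, and fake_useragent packages are installed:

python3 web_programming/open_google_results.py hello world

With no arguments, the script prompts for a search term instead.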
