221 changes: 114 additions & 107 deletions sherlock_project/sherlock.py
@@ -528,12 +528,9 @@ def timeout_check(value):


def handler(signal_received, frame):
"""Exit gracefully without throwing errors

Source: https://www.devdungeon.com/content/python-catch-sigint-ctrl-c
"""
sys.exit(0)

"""Handle SIGINT (Ctrl-C) gracefully and stop threads immediately."""
print("\nAction cancelled by user.", file=sys.stderr)
os._exit(1) # hard exit avoids threading shutdown noise
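Some context on why the new handler uses `os._exit(1)` rather than `sys.exit(0)`: inside a signal handler, `sys.exit()` only raises `SystemExit` in the main thread, so any live non-daemon worker threads keep the process alive and the interpreter emits shutdown noise while it waits. `os._exit()` terminates the process immediately, skipping thread joins and `atexit` cleanup. A minimal, self-contained sketch of the pattern (not from this PR; the worker is a stand-in for Sherlock's request threads):

```python
import os
import signal
import sys
import threading
import time

def worker():
    # Stand-in for one of Sherlock's long-running network probes.
    time.sleep(60)

def handler(signal_received, frame):
    # sys.exit(0) here would only raise SystemExit in the main thread; a
    # live non-daemon worker would keep the process running until it ends.
    print("\nAction cancelled by user.", file=sys.stderr)
    os._exit(1)  # immediate exit: no thread joins, no atexit handlers

signal.signal(signal.SIGINT, handler)
threading.Thread(target=worker).start()
time.sleep(60)  # block the main thread; Ctrl-C triggers the handler
```

The trade-off is that `os._exit()` also skips flushing buffered output, which is acceptable here since the handler prints directly to stderr first.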

def main():
parser = ArgumentParser(
@@ -820,57 +817,90 @@ def main():
all_usernames.append(name)
else:
all_usernames.append(username)
for username in all_usernames:
results = sherlock(
username,
site_data,
query_notify,
dump_response=args.dump_response,
proxy=args.proxy,
timeout=args.timeout,
)

if args.output:
result_file = args.output
elif args.folderoutput:
# The usernames results should be stored in a targeted folder.
# If the folder doesn't exist, create it first
os.makedirs(args.folderoutput, exist_ok=True)
result_file = os.path.join(args.folderoutput, f"{username}.txt")
else:
result_file = f"{username}.txt"

if args.output_txt:
with open(result_file, "w", encoding="utf-8") as file:
exists_counter = 0
for website_name in results:
dictionary = results[website_name]
if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1
file.write(dictionary["url_user"] + "\n")
file.write(f"Total Websites Username Detected On : {exists_counter}\n")

if args.csv:
result_file = f"{username}.csv"
if args.folderoutput:
try:
for username in all_usernames:
results = sherlock(
username,
site_data,
query_notify,
dump_response=args.dump_response,
proxy=args.proxy,
timeout=args.timeout,
)

if args.output:
result_file = args.output
elif args.folderoutput:
# The usernames results should be stored in a targeted folder.
# If the folder doesn't exist, create it first
os.makedirs(args.folderoutput, exist_ok=True)
result_file = os.path.join(args.folderoutput, f"{username}.txt")
else:
result_file = f"{username}.txt"

if args.output_txt:
with open(result_file, "w", encoding="utf-8") as file:
exists_counter = 0
for website_name in results:
dictionary = results[website_name]
if dictionary.get("status").status == QueryStatus.CLAIMED:
exists_counter += 1
file.write(dictionary["url_user"] + "\n")
file.write(f"Total Websites Username Detected On : {exists_counter}\n")

if args.csv:
result_file = f"{username}.csv"
if args.folderoutput:
# The usernames results should be stored in a targeted folder.
os.makedirs(args.folderoutput, exist_ok=True)
result_file = os.path.join(args.folderoutput, result_file)

with open(result_file, "w", newline="", encoding="utf-8") as csv_report:
writer = csv.writer(csv_report)
writer.writerow(
[
"username",
"name",
"url_main",
"url_user",
"exists",
"http_status",
"response_time_s",
]
)
for site in results:
if (
args.print_found
and not args.print_all
and results[site]["status"].status != QueryStatus.CLAIMED
):
continue

response_time_s = results[site]["status"].query_time
if response_time_s is None:
response_time_s = ""
writer.writerow(
[
username,
site,
results[site]["url_main"],
results[site]["url_user"],
str(results[site]["status"].status),
results[site]["http_status"],
response_time_s,
]
)
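A quick way to sanity-check a generated report (filename hypothetical; the `exists` column holds `str(...)` of the status, which presumably renders as values like "Claimed", though this diff does not show the enum's string form):

```python
import csv

# Hypothetical report produced by the csv branch above.
with open("some-user.csv", newline="", encoding="utf-8") as fh:
    for row in csv.DictReader(fh):
        if row["exists"] == "Claimed":  # assumes str(QueryStatus.CLAIMED) == "Claimed"
            print(row["name"], row["url_user"], row["response_time_s"])
```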

if args.xlsx:
usernames = []
names = []
url_main = []
url_user = []
exists = []
http_status = []
response_time_s = []

for site in results:
if (
args.print_found
and not args.print_all
and results[site]["status"].status != QueryStatus.CLAIMED
):
continue

if response_time_s is None:
response_time_s.append("")
else:
response_time_s.append(results[site]["status"].query_time)
usernames.append(username)
names.append(site)
url_main.append(results[site]["url_main"])
url_user.append(results[site]["url_user"])
exists.append(str(results[site]["status"].status))
http_status.append(results[site]["http_status"])

DataFrame = pd.DataFrame(
{
"username": usernames,
"name": names,
"url_main": [f'=HYPERLINK(\"{u}\")' for u in url_main],
"url_user": [f'=HYPERLINK(\"{u}\")' for u in url_user],
"exists": exists,
"http_status": http_status,
"response_time_s": response_time_s,
}
)
DataFrame.to_excel(f"{username}.xlsx", sheet_name="sheet1", index=False)
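The f-strings above wrap each URL in a spreadsheet `HYPERLINK` formula, so the xlsx cells render as clickable links rather than plain text. (The `\"` escapes are redundant inside a single-quoted f-string, but harmless.) A stripped-down sketch with hypothetical data; `to_excel` needs an xlsx engine such as openpyxl installed:

```python
import pandas as pd

urls = ["https://github.com/some-user"]  # hypothetical data
df = pd.DataFrame(
    {
        # Each cell becomes a spreadsheet formula, e.g.
        # =HYPERLINK("https://github.com/some-user")
        "url_user": [f'=HYPERLINK("{u}")' for u in urls],
    }
)
df.to_excel("links.xlsx", sheet_name="sheet1", index=False)  # needs openpyxl
```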

print()
except KeyboardInterrupt:
print("\nAction cancelled by user.", file=sys.stderr)
try:
query_notify.finish()
except Exception:
pass
sys.exit(1)

print()
query_notify.finish()
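Taken together, the PR guards against Ctrl-C in two layers: the SIGINT handler above for a hard stop, and this `except KeyboardInterrupt` for an orderly one that still tries `query_notify.finish()`. Since registering a custom SIGINT handler replaces Python's default behavior of raising `KeyboardInterrupt`, the except branch presumably serves as a fallback for runs where that handler is not installed. A minimal model of the second layer (names hypothetical):

```python
import sys

def run_scan(usernames):
    try:
        for username in usernames:
            ...  # per-username scan and report writing
        print()
    except KeyboardInterrupt:
        # With the custom SIGINT handler installed, the default
        # KeyboardInterrupt is never raised; this branch covers runs
        # where that handler is not registered.
        print("\nAction cancelled by user.", file=sys.stderr)
        sys.exit(1)

run_scan(["some-user"])
```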

