From 2d1938ec308f07073c69b996b14270b2109f6871 Mon Sep 17 00:00:00 2001 From: Xavier Mendez Date: Thu, 23 Oct 2014 00:07:21 +0100 Subject: [PATCH] Importing wfuzz 2.1 --- dictio.py | 58 -- encoders.py | 358 --------- externals/__init__.py | 0 externals/moduleman/__init__.py | 0 externals/moduleman/loader.py | 157 ++++ externals/moduleman/modulefilter.py | 113 +++ externals/moduleman/plugin.py | 32 + externals/moduleman/registrant.py | 118 +++ externals/reqresp/Request.py | 424 +++++++++++ externals/reqresp/Response.py | 156 ++++ .../reqresp/TextParser.py | 21 +- externals/reqresp/Variables.py | 124 +++ externals/reqresp/__init__.py | 2 + externals/reqresp/cache.py | 43 ++ externals/reqresp/exceptions.py | 7 + externals/settings/__init__.py | 0 externals/settings/settings.py | 90 +++ framework/__init__.py | 0 framework/core/__init__.py | 0 framework/core/facade.py | 94 +++ framework/core/myexception.py | 8 + framework/fuzzer/Fuzzer.py | 213 ++++++ framework/fuzzer/__init__.py | 0 framework/fuzzer/base.py | 246 ++++++ framework/fuzzer/dictio.py | 78 ++ framework/fuzzer/filter.py | 178 +++++ framework/fuzzer/fuzzobjects.py | 573 ++++++++++++++ framework/fuzzer/myhttp.py | 176 +++++ framework/plugins/__init__.py | 0 framework/plugins/api.py | 127 ++++ framework/plugins/jobs.py | 136 ++++ framework/plugins/pluginobjects.py | 26 + framework/ui/__init__.py | 0 framework/ui/console/__init__.py | 0 framework/ui/console/clparser.py | 358 +++++++++ framework/ui/console/common.py | 103 +++ framework/ui/console/controller.py | 86 +++ framework/ui/console/getch.py | 81 ++ framework/ui/console/keystroke.py | 36 + framework/ui/console/output.py | 141 ++++ framework/utils/__init__.py | 0 framework/utils/dispatcher.py | 23 + framework/utils/myqueue.py | 155 ++++ iterations.py | 37 - patterns/__init__.py | 0 patterns/singleton.py | 33 + payloads.py | 288 ------- plugins/__init__.py | 0 plugins/encoders.py | 406 ++++++++++ plugins/iterations.py | 107 +++ plugins/payloads.py | 397 ++++++++++ plugins/printers.py | 301 ++++++++ plugins/scripts/__init__.py | 0 plugins/scripts/cookies.py | 23 + plugins/scripts/grep.py | 25 + plugins/scripts/headers.py | 39 + plugins/scripts/links.py | 57 ++ plugins/scripts/listing.py | 39 + plugins/scripts/robots.py | 32 + plugins/scripts/screenshot.py | 21 + plugins/scripts/sitemap.py | 33 + plugins/scripts/svn_extractor.py | 109 +++ printers.py | 113 --- reqresp.py | 713 ------------------ wfuzz_bash_completion | 49 -- wordlist/general/http_methods.txt | 32 + 66 files changed, 5778 insertions(+), 1617 deletions(-) delete mode 100644 dictio.py delete mode 100644 encoders.py create mode 100644 externals/__init__.py create mode 100644 externals/moduleman/__init__.py create mode 100644 externals/moduleman/loader.py create mode 100644 externals/moduleman/modulefilter.py create mode 100644 externals/moduleman/plugin.py create mode 100644 externals/moduleman/registrant.py create mode 100644 externals/reqresp/Request.py create mode 100644 externals/reqresp/Response.py rename TextParser.py => externals/reqresp/TextParser.py (91%) mode change 100644 => 100755 create mode 100644 externals/reqresp/Variables.py create mode 100644 externals/reqresp/__init__.py create mode 100644 externals/reqresp/cache.py create mode 100644 externals/reqresp/exceptions.py create mode 100644 externals/settings/__init__.py create mode 100644 externals/settings/settings.py create mode 100644 framework/__init__.py create mode 100644 framework/core/__init__.py create mode 100644 framework/core/facade.py create mode 
100644 framework/core/myexception.py create mode 100644 framework/fuzzer/Fuzzer.py create mode 100644 framework/fuzzer/__init__.py create mode 100644 framework/fuzzer/base.py create mode 100644 framework/fuzzer/dictio.py create mode 100644 framework/fuzzer/filter.py create mode 100644 framework/fuzzer/fuzzobjects.py create mode 100644 framework/fuzzer/myhttp.py create mode 100644 framework/plugins/__init__.py create mode 100644 framework/plugins/api.py create mode 100644 framework/plugins/jobs.py create mode 100644 framework/plugins/pluginobjects.py create mode 100644 framework/ui/__init__.py create mode 100644 framework/ui/console/__init__.py create mode 100644 framework/ui/console/clparser.py create mode 100644 framework/ui/console/common.py create mode 100644 framework/ui/console/controller.py create mode 100644 framework/ui/console/getch.py create mode 100644 framework/ui/console/keystroke.py create mode 100644 framework/ui/console/output.py create mode 100644 framework/utils/__init__.py create mode 100644 framework/utils/dispatcher.py create mode 100644 framework/utils/myqueue.py delete mode 100644 iterations.py create mode 100644 patterns/__init__.py create mode 100644 patterns/singleton.py delete mode 100644 payloads.py create mode 100644 plugins/__init__.py create mode 100644 plugins/encoders.py create mode 100644 plugins/iterations.py create mode 100644 plugins/payloads.py create mode 100644 plugins/printers.py create mode 100644 plugins/scripts/__init__.py create mode 100644 plugins/scripts/cookies.py create mode 100644 plugins/scripts/grep.py create mode 100644 plugins/scripts/headers.py create mode 100644 plugins/scripts/links.py create mode 100644 plugins/scripts/listing.py create mode 100644 plugins/scripts/robots.py create mode 100644 plugins/scripts/screenshot.py create mode 100644 plugins/scripts/sitemap.py create mode 100644 plugins/scripts/svn_extractor.py delete mode 100644 printers.py delete mode 100644 reqresp.py delete mode 100644 wfuzz_bash_completion create mode 100644 wordlist/general/http_methods.txt diff --git a/dictio.py b/dictio.py deleted file mode 100644 index ad2a0f95..00000000 --- a/dictio.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/python - -#Covered by GPL V2.0 - -from encoders import * -from payloads import * - -# generate_dictio evolution -class dictionary: - def __init__(self,dicc=None): - if dicc: - self.__payload=dicc.getpayload() - self.__encoder=dicc.getencoder() - else: - self.__payload=payload() - self.__encoder = [lambda x: encoder().encode(x)] - self.restart() - - def count (self): - return self.__payload.count() * len(self.__encoder) - - def setpayload(self,payl): - self.__payload = payl - self.restart() - - def setencoder(self,encd): - self.__encoder=encd - self.generator = self.gen() - - def getpayload (self): - return self.__payload - - def getencoder (self): - return self.__encoder - - def generate_all(self): - dicc=[] - for i in self.__payload: - dicc.append(self.__encoder.encode(i)) - return dicc - - def __iter__(self): - self.restart() - return self - - def gen(self): - while 1: - pl=self.iter.next() - for encode in self.__encoder: - yield encode(pl) - - def next(self): - return self.generator.next() - - def restart(self): - self.iter=self.__payload.__iter__() - self.generator = self.gen() - diff --git a/encoders.py b/encoders.py deleted file mode 100644 index 6de7ccd9..00000000 --- a/encoders.py +++ /dev/null @@ -1,358 +0,0 @@ -import urllib -import base64 -import re -import binascii -import random -import hashlib - - -# SUPERCLASS 
- -class encoder: - def __init__(self): - pass - - def encode (self,string): - return string - - -####################################################### -###################################################### -######## Inheritances -####################################################### -###################################################### - -class encoder_none: - text="none" - def __init__(self): - pass - - def encode (self,string): - return string - -class encoder_urlencode (encoder): - text="urlencode" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - return urllib.quote(string) - - def decode(self,string): - try: - res=urllib.unquote(clear) - return res - except: - return 1 - -class encoder_double_urlencode (encoder): - text="double urlencode" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - return urllib.quote(urllib.quote(string)) - -class encoder_base64 (encoder): - text="base64" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - return base64.standard_b64encode(string) - - def decode(self,string): - import base64 - try: - res=base64.decodestring(string) - return res - except: - return 1 - -class encoder_uri_double_hex (encoder): - text="uri double hexadecimal" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - strt = "" - con = "%%25%02x" - s=re.compile(r"/|;|=|:|&|@|\\|\?") - for c in string: - if s.search(c): - strt += c - continue - strt += con % ord(c) - return strt - -class encoder_uri_hex (encoder): - text="uri hexadecimal" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - strt = "" - con = "%%%02x" - s=re.compile(r"/|;|=|:|&|@|\\|\?") - for c in string: - if s.search(c): - strt += c - continue - strt += con % ord(c) - return strt - - -class encoder_random_upper (encoder): - text="random Uppercase" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - strt = "" - for c in string: - x = int(random.uniform(0,10)) - x = x % 2 - if x == 1: - strt += c.upper() - else: - strt += c - return strt - - -class encoder_second_nibble_hex (encoder): - text="second nibble Hexa" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - strt = "" - con = "%%%02x" - s=re.compile(r"/|;|=|:|&|@|\\|\?") - for c in string: - if s.search(c): - strt += c - continue - temp = hex(ord(c))[2:] - strt += "%%%s%%%02x" % (str(temp[:1]), ord(temp[1:])) - return strt - -class encoder_first_nibble_hex (encoder): - text="first nibble Hexa" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - strt = "" - con = "%%%02x" - s=re.compile(r"/|;|=|:|&|@|\\|\?") - for c in string: - if s.search(c): - strt += c - continue - temp = hex(ord(c))[2:] - strt += "%%%%%02x%s" % (ord(temp[:1]), str(temp[1:])) - return strt - -class encoder_doble_nibble_hex (encoder): - text="double nibble Hexa" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - strt = "" - fin = "" - con = "%%%02x" - s=re.compile(r"/|;|=|:|&|@|\\|\?") - enc=encoder_uri_hex() - strt = enc.encode(string) - for c in strt: - if not c == "%": - if s.search(c): - fin += c - continue - fin += con % ord(c) - else: - fin += c - return fin - -class encoder_sha1 (encoder): - text="sha1" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - s=hashlib.sha1() - s.update(string) - res =s.hexdigest() - return res - -class encoder_md5 (encoder): - text="md5" - def __init__(self): - encoder.__init__(self) - - def 
encode(self,string): - m=hashlib.new('md5') - m.update(string) - res = m.hexdigest() - return res - -class encoder_binascii (encoder): - text="binary Ascii" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - res = binascii.hexlify(string) - return res - - def decode(self,string): - import binascii - try: - res = binascii.unhexlify(clear) - return res - except: - return 1 - -class encoder_html (encoder): - text="html encoder" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - res=string - res=res.replace("<","<") - res=res.replace(">",">") - res=res.replace("\"",""") - res=res.replace("'","'") - #res=res.replace("&","&") - return res - -class encoder_html_decimal (encoder): - text="html encoder decimal" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="" - for x in string: - new+="&#"+str(ord(x))+";" - return new - -class encoder_html_hexadecimal (encoder): - text="html encoder Hexa" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="" - for x in string: - val="%02x" % ord(x) - new+="&#x"+str(val)+";" - return new - -class encoder_utf8_binary (encoder): - text="utf8 binary" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="" - for x in string: - val="%02x" % ord(x) - new+="\\x"+str(val) - return new - -class encoder_utf8 (encoder): - text="utf8" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="" - for x in string: - val="%02x" % ord(x) - if len(val)==2: - new+="\\u00"+str(val) - else: - new+="\\u"+str(val) - return new - -class encoder_uri_unicode (encoder): - text="uri unicode" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="" - for x in string: - val="%02x" % ord(x) - if len(val)==2: - new+="%u00"+str(val) - else: - new+="%u"+str(val) - return new - -class encoder_mysqlchar (encoder): - text="mysql char" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="CHAR(" - for x in string: - val=str(ord(x)) - new+=str(val)+"," - new=new.strip(",") - new+=")" - return new - - def decode(self,string): - temp=string.strip("CHAR").strip("(").strip(")").split(",") - new="" - for x in temp: - new+=chr(int(x)) - return new - -class encoder_mssqlchar(encoder): - text="mssql Char" - def __init__(self): - encoder.__init__(self) - - def encode(self,string): - new="" - for x in string: - val=str(ord(x)) - new+="CHAR("+str(val)+")+" - new=new.strip("+") - return new - - def decode(self,string): - new="" - temp=string.split("+") - for x in temp: - x=x.strip("CHAR").strip(")").strip("(") - new+= chr(int(x)) - return new - -class encoder_oraclechar(encoder): - text="oracle Char" - def __init__(self): - encoder.__init__(self) - def encode(self,string): - new="" - for x in string: - val=str(ord(x)) - new+="chr("+val+")||" - new=new.strip("||") - return new - - def decode(self,string): - new="" - temp=string.split("||") - for x in temp: - x=x.strip("chr").strip(")").strip("(") - new+= chr(int(x)) - return new - - diff --git a/externals/__init__.py b/externals/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/externals/moduleman/__init__.py b/externals/moduleman/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/externals/moduleman/loader.py b/externals/moduleman/loader.py new file mode 100644 index 00000000..ed7b9620 --- /dev/null +++ b/externals/moduleman/loader.py @@ -0,0 +1,157 @@ +import inspect +import logging +import imp 
+import os.path + +class IModuleLoader: + def __init__(self, **params): + self.set_params(**params) + + def set_params(self, **params): + raise NotImplemented + + def load(self, registrant): + raise NotImplemented + +class FileLoader(IModuleLoader): + def __init__(self, **params): + IModuleLoader.__init__(self, **params) + self.__logger = logging.getLogger("libraries.FileLoader") + + def set_params(self, **params): + if not params.has_key("base_path"): + return + elif not params.has_key("filename"): + return + + self.filename = params["filename"] + self.base_path = params["base_path"] + if self.base_path.endswith('/'): self.base_path = self.base_path[:-1] + + def load(self, registrant): + self.module_registrant = registrant + + self._load_py_from_file(os.path.join(self.base_path, self.filename)) + + def _build_id(self, filename, objname): + filepath, filename = os.path.split(filename) + + fn = os.path.splitext(filename)[0] + identifier = filepath.split(self.base_path)[1][1:] + '/' + fn + '/' + objname + if identifier.startswith('/'): identifier = identifier[1:] + + return identifier + + def _load_py_from_file(self, filename): + """ + Opens "filename", inspects it and calls the registrant + """ + self.__logger.debug('__load_py_from_file. START, file=%s' % (filename,)) + + dirname, filename = os.path.split(filename) + fn = os.path.splitext(filename)[0] + exten_file = None + module = None + + try: + exten_file, filename, description = imp.find_module(fn, [dirname]) + module = imp.load_module(fn, exten_file, filename, description) + except ImportError, msg: + self.__logger.critical('__load_py_from_file. Exception, msg=%s' % (msg,)) + raise msg + except SyntaxError, msg: + # incorrect python syntax in file + self.__logger.critical('__load_py_from_file. Exception, msg=%s' % (msg,)) + raise msg + finally: + if exten_file: exten_file.close() + + + for objname in dir(module): + obj = getattr(module, objname) + self.__logger.debug('__load_py_from_file. inspecting=%s' % (objname,)) + if inspect.isclass(obj): + if '__PLUGIN_MODULEMAN_MARK' in dir(obj): + if self.module_registrant: + self.module_registrant.register(self._build_id(filename, objname), obj) + + self.__logger.debug('__load_py_from_file. END, loaded file=%s' % (filename,)) + +class DirLoader(FileLoader): + def __init__(self, **params): + FileLoader.__init__(self, **params) + self.__logger = logging.getLogger("libraries.DirLoader") + + def set_params(self, **params): + if not params.has_key("base_dir"): + return + elif not params.has_key("base_path"): + return + + self.base_dir = params["base_dir"] + self.base_path = params["base_path"] + if self.base_path.endswith('/'): self.base_path = self.base_path[:-1] + + def load(self, registrant): + self.module_registrant = registrant + self.structure = self.__load_all(self.base_dir) + + def __load_all(self, dir_name): + """ + loads all plugins and creates a loaded list of scripts from directory plugins like: + [ ( category,[script1, script2,...] ), (category2,[script1, (subcategory,[script1,script2]),...]) ] + """ + walked = [] + + current = os.path.join(self.base_path, dir_name) + if os.path.isdir(current): + l = self.__walk_dir_tree(current) + walked.append((current, l)) + if self.module_registrant: + self.module_registrant.end_loading() + + return walked + + def __walk_dir_tree(self, dirname): + l=[] + + self.__logger.debug('__walk_dir_tree. 
START dir=%s', dirname) + + for f in os.listdir(dirname): + current = os.path.join(dirname, f) + if os.path.isfile(current) and f.endswith("py"): + if self.module_registrant: + self._load_py_from_file(current) + + l.append(current) + elif os.path.isdir(current): + ret = self.__walk_dir_tree(current) + if ret: l.append( (f, ret) ) + + return l + +if __name__ == '__main__': + + from registrant import BRegistrant + + logging.basicConfig(level = logging.DEBUG, + format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s', + datefmt = '%m-%d %H:%M', + filename = "/tmp/log", + filemode = 'a') + + fl = FileLoader(**{"filename": "1.py", "base_path": "." }) + + br = BRegistrant(fl) + + print br.get_plugins_ext() + print br.get_plugins() + + print br.get_plugins_ext("default") + print br.get_plugins_ext("aggressive") + + print br.get_plugins_names() + print br.get_plugins_names("default") + print br.get_plugins_names("aggressive") + print br.get_plugins_ids("aggressive") + diff --git a/externals/moduleman/modulefilter.py b/externals/moduleman/modulefilter.py new file mode 100644 index 00000000..bd5662ae --- /dev/null +++ b/externals/moduleman/modulefilter.py @@ -0,0 +1,113 @@ +# mimicking nmap script filter + + +#nmap --script "http-*" +# Loads all scripts whose name starts with http-, such as http-auth and http-open-proxy. The argument to --script had to be in quotes to protect the wildcard from the shell. +# not valid for categories! +# +#More complicated script selection can be done using the and, or, and not operators to build Boolean expressions. The operators have the same precedence[12] as in Lua: not is the +#highest, followed by and and then or. You can alter precedence by using parentheses. Because expressions contain space characters it is necessary to quote them. +# +#nmap --script "not intrusive" +# Loads every script except for those in the intrusive category. +# +#nmap --script "default or safe" +# This is functionally equivalent to nmap --script "default,safe". It loads all scripts that are in the default category or the safe category or both. +# +#nmap --script "default and safe" +# Loads those scripts that are in both the default and safe categories. +# +#nmap --script "(default or safe or intrusive) and not http-*" +# Loads scripts in the default, safe, or intrusive categories, except for those whose names start with http-. 
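
As an illustration of the script-filter syntax described in the comments above, here is a minimal sketch exercising the Filter class defined in this module. It assumes pyparsing is installed and is run from the wfuzz root; the stand-in plugin below is hypothetical and only exposes the attributes that Filter.is_visible() inspects.

    from externals.moduleman.modulefilter import Filter

    class FakePlugin:
        # minimal stand-in exposing the attributes Filter.is_visible() reads
        name = "http-headers"
        category = ["default", "passive"]

    f = Filter()
    plg = FakePlugin()
    print f.is_visible(plg, "default or safe")        # True  (matches the 'default' category)
    print f.is_visible(plg, "http-*")                 # True  (name wildcard match)
    print f.is_visible(plg, "not intrusive")          # True
    print f.is_visible(plg, "intrusive and default")  # False
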
+ + +from types import ListType +from pyparsing import Word, Group, oneOf, Optional, Suppress, ZeroOrMore, Literal, alphas, alphanums + +class IFilter: + def is_visible(self, plugin, filter_string): + raise NotImplemented + +class Filter(IFilter): + def __init__(self): + category = Word( alphas + "_-*", alphanums + "_-*" ) + operator = oneOf("and or ,") + neg_operator = "not" + elementRef = category + definition = elementRef + ZeroOrMore( operator + elementRef) + nestedformula = Group(Suppress(Optional(Literal("("))) + definition + Suppress(Optional(Literal(")")))) + neg_nestedformula = Optional(neg_operator) + nestedformula + self.finalformula = neg_nestedformula + ZeroOrMore( operator + neg_nestedformula) + + elementRef.setParseAction(self.__compute_element) + neg_nestedformula.setParseAction(self.__compute_neg_formula) + nestedformula.setParseAction(self.__compute_formula) + self.finalformula.setParseAction(self.__myreduce) + + def __compute_neg_formula(self, tokens): + if len(tokens) > 1 and tokens[0] == 'not': + return not tokens[1] + else: + return tokens[0] + + def __compute_element(self, tokens): + item = tokens[0] + wildc_index = item.find("*") + + if wildc_index > 0: + return self.plugin.name.startswith(item[:wildc_index]) + else: + if type(self.plugin.category) == ListType: + return (item in self.plugin.category or self.plugin.name == item) + else: + return (self.plugin.category == item or self.plugin.name == item) + + def __myreduce(self, elements): + first = elements[0] + for i in range(1, len(elements), 2): + if elements[i] == "and": + first = (first and elements[i+1]) + elif elements[i] == "or" or elements[i] == ",": + first = (first or elements[i+1]) + + return first + + def __compute_formula(self, tokens): + return self.__myreduce(tokens[0]) + + def is_visible(self, plugin, filter_string): + self.plugin = plugin + return self.finalformula.parseString(filter_string)[0] + +if __name__ == "__main__": + tests = [] + tests.append("not intrusive") + tests.append("intrusive") + tests.append("safe") + tests.append("not safe") + tests.append("not http-adas") + tests.append("default or safe") + tests.append("default,safe") + tests.append("default and safe") + tests.append("not default or not safe") + tests.append("(default or safe or intrusive) and not http") + tests.append("not (default or safe or intrusive) and not http") + tests.append("not (default or safe or intrusive) and safe") + tests.append("not (default or safe or intrusive) or safe") + tests.append("not (default or intrusive) and safe") + tests.append("http-*") + tests.append("http-test") + tests.append("not http-test") + tests.append("not safe") + + class t: + def category(self): return "safe" + def name(self): return "http-test" + + res = t() + + print "cat = %s, name = %s\n\n" % (res.category(), res.name()) + for i in tests: + f = Filter() + print "%s := %s" % (str(i), f.is_visible(res, i)) + diff --git a/externals/moduleman/plugin.py b/externals/moduleman/plugin.py new file mode 100644 index 00000000..f1490290 --- /dev/null +++ b/externals/moduleman/plugin.py @@ -0,0 +1,32 @@ +def moduleman_plugin(*args): + method_args = ["name", "description", "priority", "category"] + + def inner_decorator(cls): + for method in method_args: + if (not (method in dir(cls))): + raise Exception("Required method %s not implemented" % method) + cls.__PLUGIN_MODULEMAN_MARK = "Plugin mark" + + return cls + + if not callable(args[0]): + method_args += args + return inner_decorator + + return inner_decorator(args[0]) + +if __name__ == 
'__main__': + @moduleman_plugin + class test: + def __init__(self): + print "test init" + + def description(self): + print "ii" + + def name(self): + print "ii" + + a = test() + a.description() + print a.__PLUGIN_MODULEMAN_MARK diff --git a/externals/moduleman/registrant.py b/externals/moduleman/registrant.py new file mode 100644 index 00000000..b66be261 --- /dev/null +++ b/externals/moduleman/registrant.py @@ -0,0 +1,118 @@ +import operator +from modulefilter import Filter +from collections import defaultdict +from threading import Lock + + +class IRegistrant(): + def __init__(self, loader, plg_filter): + self.plg_filter = plg_filter + self.loader = loader + + self.start_loading() + self.load() + self.end_loading() + + def register(self, identifier, module): + raise NotImplemented + + def start_loading(self): + raise NotImplemented + + def load(self): + raise NotImplemented + + def end_loading(self): + raise NotImplemented + +class KnowledgeBase: + def __init__(self): + self.__data = defaultdict(list) + self.mutex = Lock() + + def get(self, key): + with self.mutex: + return self.__data[key] + + def add(self, key, value): + with self.mutex: + self.__data[key].append(value) + + def has(self, key): + with self.mutex: + return key in self.__data + +class BRegistrant(IRegistrant): + def __init__(self, loader, plg_filter = Filter()): + self.__plugins = {} + self.__active_plugins = {} + self.kbase = KnowledgeBase() + + IRegistrant.__init__(self, loader, plg_filter) + + def register(self, identifier, module): + self.__plugins[identifier] = self._modify_instance(module) + self.__active_plugins[identifier] = True + + def load(self): + self.loader.load(self) + + def start_loading(self): + pass + + def end_loading(self): + pass + + def _modify_instance(self, module): + module.kbase = self.kbase + + return module + + # ------------------------------------------------ + # plugin management functions + # ------------------------------------------------ + def plugin_state(self, identifier, state): + self.__active_plugins[identifier] = state + + def __get_plugins(self, category, sorting): + def plugin_filter(x): + plgid, plg = x + + if category == "$all$": + return True + elif not self.__active_plugins[plgid]: + return False + else: + return self.plg_filter.is_visible(plg, category) + + def plugin_sort(x, y): + return x[1].priority - y[1].priority + + l = filter(plugin_filter, self.__plugins.items()) + + if sorting: + l.sort(plugin_sort) + + return l + + def get_plugin(self, identifier): + return self.__plugins[identifier] + + def get_plugins(self, category="$all$", sorting="true"): + return [plg for plg_id, plg in self.__get_plugins(category, sorting)] + + def get_plugins_ext(self, category="$all$", sorting="true"): + l = [['Id', 'Priority', 'Category', 'Name', 'Description']] + + for plg_id, plg in self.__get_plugins(category, sorting): + l.append([plg_id, str(plg.priority), ', '.join(plg.category), str(plg.name), str(plg.description) ]) + + return l + + def get_plugins_names(self, category="$all$", sorting="true"): + return [plg.name for plg_id, plg in self.__get_plugins(category, sorting)] + + def get_plugins_ids(self, category="$all$", sorting="true"): + return [plg_id for plg_id, plg in self.__get_plugins(category, sorting)] + + diff --git a/externals/reqresp/Request.py b/externals/reqresp/Request.py new file mode 100644 index 00000000..b434a61d --- /dev/null +++ b/externals/reqresp/Request.py @@ -0,0 +1,424 @@ +#Covered by GPL V2.0 +#Coded by Carlos del Ojo Elias (deepbit@gmail.com) +#Lately 
maintained by Javi Mendez (xmendez@edge-security.com) + +from urlparse import urlparse, urlunparse +import string +import re +import pycurl +import types + +from Variables import VariablesSet +from exceptions import ReqRespException +from Response import Response + +try: + from TextParser import * +except: + pass + +class Request: + def __init__(self): + self.__host=None # www.google.com:80 + self.__path=None # /index.php + self.__params=None # Mierdaza de index.php;lskjflkasjflkasjfdlkasdf? + self.schema="http" # http + + ##### Variables calculadas por getters NO SE PUEDEN MODIFICAR + # self.urlWithoutPath # http://www.google.es + # self.pathWithVariables # /index.php?a=b&c=d + # self.urlWithoutVariables=None # http://www.google.es/index.php + # self.completeUrl="" # http://www.google.es/index.php?a=b + # self.finalUrl="" # Url despues de hacer el FollowLocation + # self.redirectUrl="" # Url redirected + # self.postdata="" # Datos por POST, toto el string + ################ + + self.ContentType="application/x-www-form-urlencoded" # None es normal encoding + self.multiPOSThead={} + + self.__variablesGET=VariablesSet() + self.__variablesPOST=VariablesSet() + + # diccionario, por ejemplo headers["Cookie"] + self._headers={'Content-Type': 'application/x-www-form-urlencoded', + "User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1)" + } + + self.response=None # Apunta a la response que produce dicha request + + ################### lo de debajo no se deberia acceder directamente + + self.time=None # 23:00:00 + self.ip=None # 192.168.1.1 + self.method="GET" # GET o POST (EN MAYUSCULAS SI PUEDE SER) + self.protocol="HTTP/1.1" # HTTP/1.1 + self.__performHead="" + self.__performBody="" + + self.__authMethod=None + self.__userpass="" + + self.description="" # For temporally store imformation + + self.__proxy=None + self.proxytype = None + self.__timeout=None + self.__totaltimeout=None + self.__finalurl="" + + self.followLocation=False + self.__userpass="" + + self.totaltime = None + + def setFinalUrl(self, fu): + self.__finalurl = fu + + def __str__(self): + str="[ URL: %s" % (self.completeUrl) + if self.method=="POST": + str+=" - POST: \"%s\"" % self.postdata + if "Cookie" in self._headers: + str+=" - COOKIE: \"%s\"" % self._headers["Cookie"] + str+=" ]" + return str + + def getHost(self): + return self.__host + + def getXML(self,obj): + r=obj.createElement("request") + r.setAttribute("method",self.method) + url=obj.createElement("URL") + url.appendChild(obj.createTextNode(self.completeUrl)) + r.appendChild(url) + if self.method=="POST": + pd=obj.createElement("PostData") + pd.appendChild(obj.createTextNode(self.postdata)) + r.appendChild(pd) + if "Cookie" in self._headers: + ck=obj.createElement("Cookie") + ck.appendChild(obj.createTextNode(self._headers["Cookie"])) + r.appendChild(ck) + + return r + + def __getattr__ (self,name): + if name=="urlWithoutVariables": + return urlunparse((self.schema,self.__host,self.__path,'','','')) + elif name=="pathWithVariables": + return urlunparse(('','',self.__path,'',self.__variablesGET.urlEncoded(),'')) + elif name=="completeUrl": + return urlunparse((self.schema,self.__host,self.__path,self.__params,self.__variablesGET.urlEncoded(),'')) + elif name=="finalUrl": + if self.__finalurl: + return self.__finalurl + return self.completeUrl + elif name=="urlWithoutPath": + return "%s://%s" % (self.schema,self._headers["Host"]) + elif name=="path": + return self.__path + elif name=="postdata": + if 
self.ContentType=="application/x-www-form-urlencoded": + return self.__variablesPOST.urlEncoded() + elif self.ContentType=="multipart/form-data": + return self.__variablesPOST.multipartEncoded() + else: + return self.__uknPostData + else: + raise AttributeError + + def setUrl (self, urltmp): + if not (urltmp.startswith("http://") or urltmp.startswith("https://")): + urltmp = "http://" + urltmp + + self.__variablesGET=VariablesSet() + self.schema,self.__host,self.__path,self.__params,variables,f=urlparse(urltmp) + self._headers["Host"]=self.__host + + if variables: + self.__variablesGET.parseUrlEncoded(variables) + +############### PROXY ################################## + def getProxy (self): + return self.__proxy + + def setProxy (self,prox,ptype): + self.__proxy=prox + self.proxytype=ptype + +############### FOLLOW LOCATION ######################## + def setFollowLocation(self,value): + self.followLocation=value + +############## TIMEOUTS ################################ + def setConnTimeout (self,time): + self.__timeout=time + + def getConnTimeout(self): + return self.__timeout + + def setTotalTimeout (self,time): + self.__totaltimeout=time + + def getTotalTimeout(self): + return self.__totaltimeout + +############## Autenticacion ########################### + def setAuth (self,method,string): + self.__authMethod=method + self.__userpass=string + + def getAuth (self): + return self.__authMethod, self.__userpass + +############## TRATAMIENTO VARIABLES GET & POST ######################### + + def existsGETVar(self,key): + return self.__variablesGET.existsVar(key) + + def existPOSTVar(self): + return self.__variablesPOST.existsVar(key) + + + def setVariablePOST (self,key,value): + self.method="POST" + v=self.__variablesPOST.getVariable(key) + v.update(value) +# self._headers["Content-Length"]=str(len(self.postdata)) + + def setVariableGET (self,key,value): + v=self.__variablesGET.getVariable(key) + v.update(value) + + def getGETVars(self): + return self.__variablesGET.variables + + def getPOSTVars(self): + return self.__variablesPOST.variables + + def setPostData (self,pd,boundary=None): + self.__variablesPOST=VariablesSet() + self.method="POST" + if self.ContentType=="application/x-www-form-urlencoded": + self.__variablesPOST.parseUrlEncoded(pd) + elif self.ContentType=="multipart/form-data": + self.__variablesPOST.parseMultipart(pd,boundary) + else: + self.__uknPostData=pd + +############################################################################ + + def addHeader (self,key,value): + k=string.capwords(key,"-") + if k.lower() not in ["accept-encoding","content-length","if-modified-since","if-none-match"]: + self._headers[k]=value + + def delHeader (self,key): + k = string.capwords(key,"-") + if self._headers.has_key(k): + del self._headers[k] + + def __getitem__ (self,key): + k=string.capwords(key,"-") + if k in self._headers: + return self._headers[k] + else: + return "" + + def getHeaders(self): + list=[] + for i,j in self._headers.items(): + list+=["%s: %s" % (i,j)] + return list + + def head(self): + conn=pycurl.Curl() + conn.setopt(pycurl.SSL_VERIFYPEER,False) + conn.setopt(pycurl.SSL_VERIFYHOST,0) + conn.setopt(pycurl.URL,self.completeUrl) + + conn.setopt(pycurl.NOBODY, True) # para hacer un pedido HEAD + + conn.setopt(pycurl.WRITEFUNCTION, self.header_callback) + conn.perform() + + rp=Response() + rp.parseResponse(self.__performHead) + self.response=rp + + def createPath(self,newpath): + '''Creates new url from a location header || Hecho para el followLocation=true''' + if 
"http" in newpath[:4].lower(): + return newpath + + parts=urlparse(self.completeUrl) + if "/" != newpath[0]: + newpath="/".join(parts[2].split("/")[:-1])+"/"+newpath + + return urlunparse([parts[0],parts[1],newpath,'','','']) + + # pycurl - reqresp conversions + @staticmethod + def to_pycurl_object(c, req): + + c.setopt(pycurl.MAXREDIRS, 5) + + c.setopt(pycurl.WRITEFUNCTION, req.body_callback) + c.setopt(pycurl.HEADERFUNCTION, req.header_callback) + + c.setopt(pycurl.NOSIGNAL, 1) + c.setopt(pycurl.SSL_VERIFYPEER, False) + c.setopt(pycurl.SSL_VERIFYHOST, 0) + + c.setopt(pycurl.URL,req.completeUrl) + + if req.getConnTimeout(): + c.setopt(pycurl.CONNECTTIMEOUT, req.getConnTimeout()) + + if req.getTotalTimeout(): + c.setopt(pycurl.TIMEOUT, req.getTotalTimeout()) + + + authMethod, userpass = req.getAuth() + if authMethod or userpass: + if authMethod == "basic": + c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC) + elif authMethod == "ntlm": + c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM) + elif authMethod == "digest": + c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST) + c.setopt(pycurl.USERPWD, userpass) + + c.setopt(pycurl.HTTPHEADER, req.getHeaders()) + if req.method == "POST": + c.setopt(pycurl.POSTFIELDS, req.postdata) + + c.setopt(pycurl.CUSTOMREQUEST, req.method) + if req.method == "HEAD": + c.setopt(pycurl.NOBODY, True) + + if req.followLocation: + c.setopt(pycurl.FOLLOWLOCATION, 1) + + proxy = req.getProxy() + if proxy != None: + c.setopt(pycurl.PROXY, proxy) + if req.proxytype=="SOCK5": + c.setopt(pycurl.PROXYTYPE,pycurl.PROXYTYPE_SOCKS5) + elif req.proxytype=="SOCK4": + c.setopt(pycurl.PROXYTYPE,pycurl.PROXYTYPE_SOCKS4) + req.delHeader("Proxy-Connection") + + return c + + def response_from_conn_object(self, conn, header, body): + # followlocation + if conn.getinfo(pycurl.EFFECTIVE_URL) != self.completeUrl: + self.setFinalUrl(conn.getinfo(pycurl.EFFECTIVE_URL)) + #pycurl reponse headers includes original => remove + header = header[header.find("\r\n\r\n")+1:] + + self.totaltime = conn.getinfo(pycurl.TOTAL_TIME) + + rp = Response() + rp.parseResponse(header) + rp.addContent(body) + + if self.schema=="https" and self.__proxy: + self.response=Response() + self.response.parseResponse(rp.getContent()) + else: + self.response=rp + + return rp + + def perform(self): + self.__performHead="" + self.__performBody="" + self.__headersSent="" + + try: + conn = Request.to_pycurl_object(pycurl.Curl(), self) + conn.perform() + self.response_from_conn_object(conn, self.__performHead, self.__performBody) + except pycurl.error, error: + errno, errstr = error + raise ReqRespException(ReqRespException.FATAL, errstr) + finally: + conn.close() + + ######### ESTE conjunto de funciones no es necesario para el uso habitual de la clase + + def getAll (self): + pd=self.postdata + string=str(self.method)+" "+str(self.pathWithVariables)+" "+str(self.protocol)+"\n" + for i,j in self._headers.items(): + string+=i+": "+j+"\n" + string+="\n"+pd + + return string + + ########################################################################## + + def header_callback(self,data): + self.__performHead+=data + + def body_callback(self,data): + self.__performBody+=data + + def Substitute(self,src,dst): + a=self.getAll() + rx=re.compile(src) + b=rx.sub(dst,a) + del rx + self.parseRequest(b,self.schema) + + def parseRequest (self,rawRequest,prot="http"): + ''' Aun esta en fase BETA y por probar''' + tp=TextParser() + tp.setSource("string",rawRequest) + + self.__variablesPOST=VariablesSet() + self._headers={} # diccionario, por 
ejemplo headers["Cookie"] + + tp.readLine() + try: + tp.search("^(\w+) (.*) (HTTP\S*)$") + self.method=tp[0][0] + self.protocol=tp[0][2] + except Exception,a: + print rawRequest + raise a + + pathTMP=tp[0][1].replace(" ","%20") + pathTMP=('','')+urlparse(pathTMP)[2:] + pathTMP=urlunparse(pathTMP) + + while True: + tp.readLine() + if (tp.search("^([^:]+): (.*)$")): + self.addHeader(tp[0][0],tp[0][1]) + else: + break + + self.setUrl(prot+"://"+self._headers["Host"]+pathTMP) + + if self.method.upper()=="POST": + + pd="" + while tp.readLine(): + pd+=tp.lastFull_line + + boundary=None + if "Content-Type" in self._headers: + values=self._headers["Content-Type"].split(";") + self.ContentType=values[0].strip().lower() + if self.ContentType=="multipart/form-data": + boundary=values[1].split("=")[1].strip() + + self.setPostData(pd,boundary) + + + diff --git a/externals/reqresp/Response.py b/externals/reqresp/Response.py new file mode 100644 index 00000000..2328e3da --- /dev/null +++ b/externals/reqresp/Response.py @@ -0,0 +1,156 @@ +from urlparse import urlparse, urlunparse +import string +import re +import StringIO +import gzip + +try: + from TextParser import * +except: + pass + +class Response: + + def __init__ (self,protocol="",code="",message=""): + self.protocol=protocol # HTTP/1.1 + self.code=code # 200 + self.message=message # OK + self._headers=[] # bueno pues las cabeceras igual que en la request + self.__content="" # contenido de la response (si i solo si Content-Length existe) + self.md5="" # hash de los contenidos del resultado + self.charlen="" # Cantidad de caracteres de la respuesta + + def addHeader (self,key,value): + k=string.capwords(key,"-") + self._headers+=[(k,value)] + + def delHeader (self,key): + for i in self._headers: + if i[0].lower()==key.lower(): + self._headers.remove(i) + + + def addContent (self,text): + self.__content=self.__content+text + + def __getitem__ (self,key): + for i,j in self._headers: + if key==i: + return j + print "Error al obtener header!!!" 
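
To make the intended use of these reqresp classes concrete, a minimal hypothetical sketch of driving a Request by hand follows (it assumes pycurl is available; the target URL is only a placeholder):

    from externals.reqresp import Request

    req = Request()
    req.setUrl("http://target.example/login.php")   # placeholder URL
    req.addHeader("User-Agent", "wfuzz-example")
    req.setVariablePOST("user", "admin")            # switches the method to POST
    req.perform()                                   # executes the request through pycurl

    print req.response.code                         # e.g. 200
    print req.response["Content-Type"]              # header lookup via Response.__getitem__
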
+ + def getCookie (self): + str=[] + for i,j in self._headers: + if i.lower()=="set-cookie": + str.append(j.split(";")[0]) + return "; ".join(str) + + + def has_header (self,key): + for i,j in self._headers: + if i.lower()==key.lower(): + return True + return False + + def getLocation (self): + for i,j in self._headers: + if i.lower()=="location": + return j + return None + + def header_equal (self,header,value): + for i,j in self._headers: + if i==header and j.lower()==value.lower(): + return True + return False + + def getHeaders (self): + return self._headers + + def getContent (self): + return self.__content + + def getTextHeaders(self): + string=str(self.protocol)+" "+str(self.code)+" "+str(self.message)+"\r\n" + for i,j in self._headers: + string+=i+": "+j+"\r\n" + + return string + + def getAll (self): + string=self.getTextHeaders()+"\r\n"+self.getContent() + return string + + def Substitute(self,src,dst): + a=self.getAll() + b=a.replace(src,dst) + self.parseResponse(b) + + def getAll_wpost (self): + string=str(self.protocol)+" "+str(self.code)+" "+str(self.message)+"\r\n" + for i,j in self._headers: + string+=i+": "+j+"\r\n" + return string + + + def parseResponse (self,rawResponse,type="curl"): + self.__content="" + self._headers=[] + + tp=TextParser() + tp.setSource("string",rawResponse) + + while True: + tp.readUntil("(HTTP\S*) ([0-9]+)") + + try: + self.protocol=tp[0][0] + except: + self.protocol="unknown" + + try: + self.code=tp[0][1] + except: + self.code="0" + + if self.code!="100": + break + + + self.code=int(self.code) + + while True: + tp.readLine() + if (tp.search("^([^:]+): ?(.*)$")): + self.addHeader(tp[0][0],tp[0][1]) + else: + break + + while tp.skip(1): + self.addContent(tp.lastFull_line) + + if type=='curl': + self.delHeader("Transfer-Encoding") + + if self.header_equal("Transfer-Encoding","chunked"): + result="" + content=StringIO.StringIO(self.__content) + hexa=content.readline() + nchunk=int(hexa.strip(),16) + + while nchunk: + result+=content.read(nchunk) + content.readline() + hexa=content.readline() + nchunk=int(hexa.strip(),16) + + self.__content=result + + if self.header_equal("Content-Encoding","gzip"): + compressedstream = StringIO.StringIO(self.__content) + gzipper = gzip.GzipFile(fileobj=compressedstream) + body=gzipper.read() + self.__content=body + self.delHeader("Content-Encoding") + diff --git a/TextParser.py b/externals/reqresp/TextParser.py old mode 100644 new mode 100755 similarity index 91% rename from TextParser.py rename to externals/reqresp/TextParser.py index c4026fa6..c03438e6 --- a/TextParser.py +++ b/externals/reqresp/TextParser.py @@ -13,12 +13,31 @@ def __init__ (self): self.type="" self.lastFull_line=None self.lastline = None - pass + + self.actualIndex=0 + + def __del__ (self): if self.type=="file": self.fd.close() + def __str__(self): + return str( self.matches) + + def __iter__(self): + self.actualIndex=0 + return self + + def next(self): + try: + value=self.matches[self.actualIndex] + self.actualIndex+=1 + return value + except: + raise StopIteration + + def setSource (self,t,*args): '''Se especifica el tipo de entrada. 
Puede ser fichero o entrada estandard diff --git a/externals/reqresp/Variables.py b/externals/reqresp/Variables.py new file mode 100644 index 00000000..853b1959 --- /dev/null +++ b/externals/reqresp/Variables.py @@ -0,0 +1,124 @@ +class Variable: + def __init__(self,name,value="",extraInfo=""): + self.name=name + self.value=value + self.initValue=value + self.extraInfo=extraInfo + + def restore(self): + self.value=self.initValue + + def change(self,newval): + self.initValue=self.value=newval + + def update(self,val): + self.value=val + + def append(self,val): + self.value+=val + + def __str__(self): + return "[ %s : %s ]" % (self.name,self.value) + +class VariablesSet: + def __init__(self): + self.variables=[] + self.boundary=None + + def names(self): + dicc=[] + for i in self.variables: + dicc.append(i.name) + + return dicc + + def existsVar(self,name): + return name in self.names() + + def addVariable(self,name,value="",extraInfo=""): + self.variables.append(Variable(name,value,extraInfo)) + + + def getVariable(self,name): + dicc=[] + for i in self.variables: + if i.name==name: + dicc.append(i) + + if len(dicc)>1: + raise Exception, "Variable exists more than one time!!! :D" % (name) + + if not dicc: + var=Variable(name) + self.variables.append(var) + return var + + return dicc[0] + + + def urlEncoded(self): + return "&".join(["=".join([i.name,i.value]) if i.value is not None else i.name for i in self.variables]) + + def parseUrlEncoded(self,cad): + dicc=[] + + for i in cad.split("&"): + if i: + list=i.split("=",1) + if len (list)==1: + dicc.append(Variable(list[0], None)) + elif len (list)==2: + dicc.append(Variable(list[0],list[1])) + + self.variables=dicc + + def multipartEncoded(self): + if not self.boundary: + self.boundary="---------------------------D33PB1T0R3QR3SP0B0UND4RY2203" + pd="" + pos=0 + for i in self.variables: + pd+="--"+self.boundary+"\r\n" + pd+="%s\r\n\r\n%s\r\n" % ("\r\n".join(i.extraInfo),i.value) + pd+="--"+self.boundary+"--\r\n" + return pd + + def parseMultipart(self,cad,boundary): + self.boundary=boundary + dicc=[] + tp=TextParser() + tp.setSource("string",cad) + + while True: + headers=[] + if not tp.readUntil("name=\"([^\"]+)\""): + break + var=tp[0][0] + headers.append(tp.lastFull_line.strip()) + while True: + tp.readLine() + if tp.search("^([^:]+): (.*)$"): + headers.append(tp.lastFull_line.strip()) + else: + break + + value="" + while True: + tp.readLine() + if not tp.search(boundary): + value+=tp.lastFull_line + else: + break + + if value[-2:]=="\r\n": + value=value[:-2] + + + dicc.append(Variable(var,value,headers)) + + self.variables=dicc + + + + + diff --git a/externals/reqresp/__init__.py b/externals/reqresp/__init__.py new file mode 100644 index 00000000..6dc74444 --- /dev/null +++ b/externals/reqresp/__init__.py @@ -0,0 +1,2 @@ +from Request import Request +from Response import Response diff --git a/externals/reqresp/cache.py b/externals/reqresp/cache.py new file mode 100644 index 00000000..07715a6c --- /dev/null +++ b/externals/reqresp/cache.py @@ -0,0 +1,43 @@ +from collections import defaultdict + +class HttpCache: + def __init__(self): + # cache control + self.__cache_map = defaultdict(list) + + def _gen_cache_key(self, req): + key = req.urlWithoutVariables + + dicc = {} + + for j in [i.name for i in req.getGETVars()]: + dicc[j] = True + + for j in [i.name for i in req.getPOSTVars()]: + dicc[j] = True + + # take URL parameters into consideration + url_params = dicc.keys() + url_params.sort() + key += "-" + "-".join(url_params) + + return key + + 
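
Because the cache key built above only takes the URL and the parameter names into account (not their values), repeated hits on the same resource collapse into a single entry. A small hypothetical sketch of that behaviour (URLs are placeholders):

    from externals.reqresp import Request
    from externals.reqresp.cache import HttpCache

    cache = HttpCache()

    first = Request()
    first.setUrl("http://target.example/index.php?id=1")
    print cache.update_cache(first)    # True, first time this URL/parameter combination is seen

    second = Request()
    second.setUrl("http://target.example/index.php?id=2")
    print cache.update_cache(second)   # False, same URL and parameter names, only the value differs
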
def update_cache(self, req, category = 'default'): + key = self._gen_cache_key(req) + + # first hit + if not key in self.__cache_map: + self.__cache_map[key].append(category) + return True + elif key in self.__cache_map and not category in self.__cache_map[key]: + self.__cache_map[key].append(category) + return True + + return False + + def msg_in_cache(self, req, category = 'default'): + key = self._gen_cache_key(req) + + return key in self.__cache_map and category in self.__cache_map[key] + diff --git a/externals/reqresp/exceptions.py b/externals/reqresp/exceptions.py new file mode 100644 index 00000000..9b2f7a51 --- /dev/null +++ b/externals/reqresp/exceptions.py @@ -0,0 +1,7 @@ + +class ReqRespException(Exception): + FATAL, RESOLVE_PROXY, RESOLVE_HOST, CONNECT_HOST, SSL, TIMEOUT = range(6) + def __init__(self, etype, msg): + self.etype = etype + self.msg = msg + Exception.__init__(self, msg) diff --git a/externals/settings/__init__.py b/externals/settings/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/externals/settings/settings.py b/externals/settings/settings.py new file mode 100644 index 00000000..af3eb6a6 --- /dev/null +++ b/externals/settings/settings.py @@ -0,0 +1,90 @@ +import ConfigParser +import os, sys + +class SettingsBase: + """ + Contains application settings. uses a ConfigParser + """ + def __init__(self, save = False): + self.cparser = ConfigParser.SafeConfigParser() + + self.set_all(self.set_defaults()) + self.filename = os.path.join(self._path_to_program_dir(), self.get_config_file()) + + self.cparser.read(self.filename) + + # Base members should implement + + def get_config_file(self): + '''Returns the name of the file where the config is saved.''' + raise NotImplemented + + def set_defaults(self): + ''' + Returns a dictionary with the default settings in the form of + { \ + Section: [ \ + ("setting_x", '5'), + ... + ("setting_y", '5'), + ], + ... + } + ''' + raise NotImplemented + + def set(self, section, setting, value): + if type(value) == type(u''): + value = value.encode('utf-8') + self.cparser.set(section, setting, value) + + def get(self, section, setting): + value = self.cparser.get(section, setting) + return value.decode('utf-8') + + def get_options(self, section): + return self.cparser.options(section) + + def get_sections(self): + return self.cparser.sections() + + def get_all(self): + sett = {} + + # dump entire config file + for section in self.cparser.sections(): + for option in self.cparser.options(section): + if not sett.has_key(section): + sett[section] = [] + sett[section].append( (option, self.cparser.get(section, option) ) ) + + return sett + + def set_all(self, sett): + self.cparser = ConfigParser.SafeConfigParser() + for section, settings in sett.items(): + self.cparser.add_section(section) + for key, value in settings: + self.cparser.set(section, key, value) + + def save(self): + try: + iniFile = file(self.filename, 'w') + self.cparser.write(iniFile) + iniFile.close() + except Exception, message: + return False + return True + + def _path_to_program_dir(self): + """ + Returns path to program directory + """ + path = sys.argv[0] + + if not os.path.isdir(path): + path = os.path.dirname(path) + + if not path: return '.' 
+ + return path diff --git a/framework/__init__.py b/framework/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/core/__init__.py b/framework/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/core/facade.py b/framework/core/facade.py new file mode 100644 index 00000000..28a8c8d5 --- /dev/null +++ b/framework/core/facade.py @@ -0,0 +1,94 @@ +from patterns.singleton import Singleton +from framework.core.myexception import FuzzException +from externals.moduleman.registrant import BRegistrant +from externals.moduleman.loader import FileLoader +from externals.moduleman.loader import DirLoader +from externals.settings.settings import SettingsBase + +version = "2.1" + +class Settings(SettingsBase): + def get_config_file(self): + return "wfuzz.ini" + + def set_defaults(self): + return dict( + plugins=[("file_bl", '.jpg,.gif,.png,.jpeg,.mov,.avi,.flv,.ico'), ("bing_apikey", '')], + ) + +class FuzzSessionOptions: + def __init__(self): + self._values = { + "filter_params": None, + "printer_tool": "default", + "rlevel": 0, + "script_string": "", + "sleeper": None, + "proxy_list": None, + "scanmode": False, + "interactive": False, + "max_concurrent": 10, + "genreq": None, + } + + def set(self, name, value): + self._values[name] = value + + def get(self, name): + return self._values[name] + +class Facade: + __metaclass__ = Singleton + + def __init__(self): + try: + self.__printers = BRegistrant(FileLoader(**{"filename": "printers.py", "base_path": "./plugins" })) + self.__plugins = BRegistrant(DirLoader(**{"base_dir": "scripts", "base_path": "./plugins" })) + self.__encoders = BRegistrant(FileLoader(**{"filename": "encoders.py", "base_path": "./plugins" })) + self.__iterators = BRegistrant(FileLoader(**{"filename": "iterations.py", "base_path": "./plugins" })) + self.__payloads = BRegistrant(FileLoader(**{"filename": "payloads.py", "base_path": "./plugins" })) + except Exception, e: + raise FuzzException(FuzzException.FATAL, "Error loading plugins: %s" % str(e)) + + self.sett = Settings() + + def proxy(self, which): + if which == 'parsers': + return self.__plugins + elif which == 'encoders': + return self.__encoders + elif which == 'iterators': + return self.__iterators + elif which == 'payloads': + return self.__payloads + elif which == 'printers': + return self.__printers + else: + raise FuzzException(FuzzException.FATAL, "Non-existent proxy %s" % which) + + def get_printer(self, name): + try: + return self.__printers.get_plugin("printers/" + name)() + except KeyError: + raise FuzzException(FuzzException.FATAL, name + " printer does not exists (-e printers for a list of available printers)") + + def get_payload(self, name): + try: + return self.__payloads.get_plugin("payloads/" + name) + except KeyError: + raise FuzzException(FuzzException.FATAL, name + " payload does not exists (-e payloads for a list of available payloads)") + + def get_iterator(self, name): + try: + return self.__iterators.get_plugin("iterations/" + name) + except KeyError: + raise FuzzException(FuzzException.FATAL, name + " iterator does not exists (-m iterators for a list of available iterators)") + + def get_encoder(self, name): + try: + return self.__encoders.get_plugin("encoders/" + name)() + except KeyError: + raise FuzzException(FuzzException.FATAL, name + " encoder does not exists (-e encodings for a list of available encoders)") + + def get_parsers(self, filterstr): + return self.__plugins.get_plugins(filterstr) diff --git a/framework/core/myexception.py 
b/framework/core/myexception.py new file mode 100644 index 00000000..5c60aa52 --- /dev/null +++ b/framework/core/myexception.py @@ -0,0 +1,8 @@ + +class FuzzException(Exception): + FATAL, SIGCANCEL = range(2) + + def __init__(self, etype, msg): + self.etype = etype + self.msg = msg + Exception.__init__(self, msg) diff --git a/framework/fuzzer/Fuzzer.py b/framework/fuzzer/Fuzzer.py new file mode 100644 index 00000000..269a748e --- /dev/null +++ b/framework/fuzzer/Fuzzer.py @@ -0,0 +1,213 @@ +import threading +import time +from Queue import Queue + +from framework.fuzzer.fuzzobjects import FuzzResult +from framework.fuzzer.fuzzobjects import FuzzRequest +from framework.fuzzer.dictio import requestGenerator + +from framework.core.facade import Facade + +from framework.core.myexception import FuzzException +from framework.utils.myqueue import MyPriorityQueue +from framework.utils.myqueue import FuzzQueue +from framework.fuzzer.myhttp import HttpQueue +from framework.plugins.jobs import JobMan +from framework.plugins.jobs import ProcessorQ +from framework.plugins.jobs import RoundRobin +from framework.fuzzer.filter import FilterQ + +from externals.reqresp.exceptions import ReqRespException +from externals.reqresp.cache import HttpCache + + + +class SeedQ(FuzzQueue): + def __init__(self, genReq, delay, queue_out): + FuzzQueue.__init__(self, queue_out) + self.delay = delay + self.genReq = genReq + + def get_name(self): + return 'SeedQ' + + def _cleanup(self): + pass + + def do_baseline(self): + # initial baseline request + bl = self.genReq.get_baseline() + if bl: + self.genReq.stats.pending_fuzz += 1 + self.queue_out.put_first(bl) + + # wait for BBB to be completed before generating more items + while(self.genReq.stats.processed == 0): + time.sleep(0.0001) + + def process(self, prio, item): + if isinstance(item, requestGenerator): + if self.genReq.stats.pending_seeds == 0: + self.genReq.stats.mark_start() + self.genReq.stats.pending_seeds += 1 + self.do_baseline() + else: + self.genReq.stats.pending_seeds += 1 + elif isinstance(item, FuzzRequest): + self.genReq.restart(item) + else: + raise FuzzException(FuzzException.FATAL, "SeedQ: Unknown item type in queue!") + + # Enqueue requests + try: + rq = self.genReq.next() + while rq: + self.genReq.stats.pending_fuzz += 1 + if self.delay: time.sleep(self.delay) + self.send(rq) + rq = self.genReq.next() + except StopIteration: + pass + + self.genReq.stats.pending_seeds -= 1 + + +class RoutingQ(FuzzQueue): + def __init__(self, routes, queue_out): + FuzzQueue.__init__(self, queue_out) + self.routes = routes + + def set_routes(self, routes): + self.routes = routes + + def get_name(self): + return 'RoutingQ' + + def _cleanup(self): + pass + + def process(self, prio, item): + if str(item.__class__) == "framework.plugins.pluginobjects.PluginRequest": + self.routes[str(item.__class__)].put(item.request) + else: + self.routes[str(item.__class__)].put(item) + +class Fuzzer: + def __init__(self, options): + self.genReq = options.get("genreq") + + # Get active plugins + lplugins = None + if options.get("script_string"): + lplugins = Facade().get_parsers(options.get("script_string")) + + if not lplugins: + raise FuzzException(FuzzException.FATAL, "No plugin selected, check the --script name or category introduced.") + + recursive = lplugins or options.get("rlevel") > 0 + filtering = options.get('filter_params')['active'] is True + + # Create queues (in reverse order) + # genReq ---> seed_queue -> http_queue -> [round_robin] -> [plugins_queue] * N -> process_queue 
-> [routing_queue] -> [filter_queue]---> results_queue + self.results_queue = MyPriorityQueue() + self.filter_queue = FilterQ(options.get("filter_params"), self.results_queue) if filtering else None + self.routing_queue = RoutingQ(None, self.filter_queue if filtering else self.results_queue) if recursive else None + self.process_queue = ProcessorQ(options.get("rlevel"), self.genReq.stats, self.routing_queue if recursive else self.filter_queue if filtering else self.results_queue) + self.plugins_queue = None + if lplugins: + cache = HttpCache() + self.plugins_queue = RoundRobin([JobMan(lplugins, cache, self.process_queue), JobMan(lplugins, cache, self.process_queue), JobMan(lplugins, cache, self.process_queue)]) + self.http_queue = HttpQueue(options, self.plugins_queue if lplugins else self.process_queue) + self.seed_queue = SeedQ(self.genReq, options.get("sleeper"), self.http_queue) + + # recursion routes + if recursive: + self.routing_queue.set_routes({ + "": self.seed_queue, + "framework.plugins.pluginobjects.PluginRequest": self.http_queue, + "framework.fuzzer.fuzzobjects.FuzzResult": self.filter_queue if filtering else self.results_queue}) + + ## initial seed request + self.seed_queue.put_priority(1, self.genReq) + + def __iter__(self): + return self + + def process(self): + # http://bugs.python.org/issue1360 + prio, item = self.results_queue.get(True, 365 * 24 * 60 * 60) + + # Return new result + if isinstance(item, FuzzResult): + item.nres = self.genReq.stats.processed + self.genReq.stats.processed += 1 + self.genReq.stats.pending_fuzz -= 1 + if not item.is_visible: self.genReq.stats.filtered += 1 + + self.results_queue.task_done() + return item + # raise exceptions originated on other queues (not sigcancel which is to cancel the whole proces) + elif isinstance(item, FuzzException) and item.etype == FuzzException.SIGCANCEL: + self.results_queue.task_done() + elif isinstance(item, Exception): + self.results_queue.task_done() + raise item + # We are done if a None arrives + elif item == None: + self.results_queue.task_done() + self.genReq.stats.mark_end() + return None + + def next(self): + res = self.process() + + # done! (None sent has gone through all queues). + if not res: + raise StopIteration + + # check if we are done. 
If so, send None to everyone so the can stop nicely + if self.genReq.stats.pending_fuzz == 0 and self.genReq.stats.pending_seeds == 0: + self.seed_queue.put_last(None) + + return res + + def stats(self): + dic = { + "plugins_queue": self.plugins_queue.qsize() if self.plugins_queue else -1, + "results_queue": self.process_queue.qsize(), + "results_queue": self.results_queue.qsize(), + "routing_queue": self.routing_queue.qsize() if self.routing_queue else -1, + "http_queue": self.http_queue.qsize(), + "seed_queue": self.seed_queue.qsize(), + "filter_queue": self.filter_queue.qsize() if self.filter_queue else -1, + } + + if self.plugins_queue: + j = 0 + for i in self.plugins_queue.queue_out: + dic = dict(dic.items() + {"plugins_queue #%d" % j: i.qsize()}.items()) + j += 1 + + return dict(self.http_queue.job_stats().items() + self.genReq.stats.get_stats().items() + dic.items()) + + def cancel_job(self): + # stop generating items + self.http_queue.pause.set() + self.genReq.stop() + + # stop processing pending items + for q in [self.seed_queue, self.http_queue, self.plugins_queue, self.process_queue, self.filter_queue, self.routing_queue]: + if q: q.put_first(FuzzException(FuzzException.SIGCANCEL, "Cancel job")) + + # wait for cancel to be processed + for q in [self.seed_queue, self.http_queue, self.plugins_queue] + self.plugins_queue.queue_out if self.plugins_queue else [] + [self.process_queue, self.filter_queue, self.routing_queue]: + if q: q.join() + + # send None to stop (almost nicely) + self.seed_queue.put_last(None) + + def pause_job(self): + self.http_queue.pause.clear() + + def resume_job(self): + self.http_queue.pause.set() diff --git a/framework/fuzzer/__init__.py b/framework/fuzzer/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/fuzzer/base.py b/framework/fuzzer/base.py new file mode 100644 index 00000000..e297849e --- /dev/null +++ b/framework/fuzzer/base.py @@ -0,0 +1,246 @@ +import abc + +# decorator for iterator plugins +def wfuzz_iterator(gen_func): + class _reiterator: + name = gen_func.name + description = gen_func.description + category = gen_func.category + priority = gen_func.priority + + def __init__(self, *args, **kwargs): + self.__args = args + self.__kwargs = kwargs + + self.__gen = gen_func(*self.__args, **self.__kwargs) + + def restart(self): + self.__gen = gen_func(*self.__args, **self.__kwargs) + + def __getattr__(self, method): + if method != "restart": + return getattr(self.__gen, method) + else: + return self.restart + + _reiterator.__PLUGIN_MODULEMAN_MARK = "Plugin mark" + return _reiterator + +class BaseFuzzRequest: + """ Abstract class defining an interface for a Fuzz request. + This is mainly due to the idea of not using reqresp in the future and therefore be agnostic on underneath HTTP request representation + This allows plugins to access to fuzzrequest consistently across the code base. 
+ """ + __metaclass__ = abc.ABCMeta + + # read only methods for accessing HTTP requests information consistenly accross the codebase + + @abc.abstractmethod + def fr_headers(self): + """ + Gets HTTP request headers in the form of a dictionary.dict(request = {}, response = {}) + """ + return + + @abc.abstractmethod + def fr_parameters(self): + """ + Gets HTTP request GET or POST parameters in the form of a dictionary.dict(get = {}, post = {}) + """ + return + + @abc.abstractmethod + def fr_cookies(self): + """ + Returns HTTP request cookies in the form of a dictionary.dict(request = {}, response = {}) + """ + return + + @abc.abstractmethod + def fr_method(self): + """ + Returns HTTP request method, ie. GET, POST, PUT,... + """ + return + + @abc.abstractmethod + def fr_schema(self): + """ + Returns HTTP request schema, ie. HTTP or HTTPS + """ + return + + @abc.abstractmethod + def fr_host(self): + """ + Returns HTTP request host + """ + return + + @abc.abstractmethod + def fr_url(self): + """ + Sets/gets HTTP request final url (even if redirected) + """ + return + + @abc.abstractmethod + def fr_redirect_url(self): + """ + Returns HTTP request original URL before redirection (or same as url() if none) + """ + return + + @abc.abstractmethod + def fr_content(self): + """ + Returns HTTP response raw content (without headers) + """ + return + + @abc.abstractmethod + def fr_code(self): + """ + Returns HTTP response return code (if no response none) + """ + return + + @abc.abstractmethod + def fr_auth(self): + """ + Gets/Sets HTTP request auth in the form of (basic/digest/ntlm, user:pass) + """ + return + + @abc.abstractmethod + def fr_follow(self): + """ + Property that sets/gets if HTTP request follows redirections + """ + return + + @abc.abstractmethod + def fr_time(self): + """ + Returns time to fullfill HTTP request and response + """ + return + + # Info extra that wfuzz needs within an HTTP request + + @abc.abstractproperty + def wf_is_baseline(self): + """ + Property that indicates if an HTTP request is from the baseline + """ + return + + @abc.abstractproperty + def wf_proxy(self): + """ + Property that sets/gets HTTP request proxy in the form of (IP:PORT, TYPE) + """ + return + + @abc.abstractmethod + def wf_allvars_len(self): + """ + Returns the number of variables of the HTTP request depending on alvars property ('allvars','allpost','allheaders') + """ + return + + @abc.abstractproperty + def wf_allvars(self): + """ + Returns 'none','allvars','allpost','allheaders' if the HTTP request is a wfuzz request without FUZZ and fuzzing everything + """ + return + + @abc.abstractproperty + def rlevel(self): + """ + Property that sets/gets HTTP request recursion level (this is need for priority queues) + """ + return + + @abc.abstractproperty + def wf_fuzz_methods(self): + """ + Property that sets/gets if HTTP methods are fuzzed + """ + return + + @abc.abstractproperty + def wf_description(self): + """ + Property that sets/gets HTTP request summary to show in console + """ + return + + # methods wfuzz needs to perform HTTP requests (this might change in the future). 
+ + @abc.abstractmethod + def from_http_object(self, c, h, b): + """ + Converts pycurl object to fuzz request + """ + return + + @abc.abstractmethod + def to_http_object(self, c): + """ + Converts the fuzz request to a pycurl object + """ + return + + # methods wfuzz needs for substituing payloads and building dictionaries + + @staticmethod + @abc.abstractmethod + def from_seed(seed, payload): + """ + Returns a new fuzz request instance substituting all the FUZnZ marks to the corresponding payload. + """ + return + + @staticmethod + @abc.abstractmethod + def from_baseline(seed): + """ + Returns a new fuzz request instance replacing FUZZ{baseline} for baseline + """ + return + + @staticmethod + @abc.abstractmethod + def from_all_fuzz_request(seed, payload): + """ + yields a new fuzz request instance for earch variable and payload + """ + return + + # methods wfuzz needs for creating and converting a fuzz request to other internal objects, ie. fuzz result + + @staticmethod + @abc.abstractmethod + def from_fuzzRes(fuzz_res, new_url = None): + """ + Returns a new fuzz request instance based on the given fuzzresult. + if new url is set, the new fuzz request must change it and not to be base on res history (due to recursivity) + """ + return + + @abc.abstractmethod + def from_copy(self): + """ + Returns a new fuzz request instance copying itself + """ + return + + @staticmethod + @abc.abstractmethod + def from_parse_options(options): + """ + Returns a new fuzz request instance parsing command line options + """ + return diff --git a/framework/fuzzer/dictio.py b/framework/fuzzer/dictio.py new file mode 100644 index 00000000..b9c973e6 --- /dev/null +++ b/framework/fuzzer/dictio.py @@ -0,0 +1,78 @@ +from framework.fuzzer.fuzzobjects import FuzzRequest +from framework.fuzzer.fuzzobjects import FuzzStats + +class dictionary: + def __init__(self, payload, encoders): + self.__payload = payload + self.__encoder = encoders + self.__generator = None + + def count (self): + return self.__payload.count() * len(self.__encoder) + + def restart(self): + self.__payload.restart() + self.__generator = self.gen() + + def __iter__(self): + self.restart() + return self + + def gen(self): + while 1: + pl = self.__payload.next() + for encode in self.__encoder: + yield encode(pl) + + def next(self): + return self.__generator.next() + +class requestGenerator: + def __init__(self, seed, dictio): + self.seed = seed + self._baseline = FuzzRequest.from_baseline(seed) + self.dictio = dictio + + self.stats = FuzzStats.from_requestGenerator(self) + + if self.seed.wf_allvars is not None: + self._allvar_gen = self.__allvars_gen(self.dictio) + else: + self._allvar_gen = None + + + def stop(self): + self.stats.cancelled = True + + def restart(self, seed): + self.seed = seed + self.dictio.restart() + + def count(self): + v = self.dictio.count() + if self.seed.wf_allvars is not None: + v *= self.seed.wf_allvars_len() + + if self._baseline: v += 1 + + return v + + def __iter__(self): + return self + + def __allvars_gen(self, dic): + for payload in dic: + for r in FuzzRequest.from_all_fuzz_request(self.seed, payload): + yield r + + def next(self): + if self.stats.cancelled: + raise StopIteration + + if self.seed.wf_allvars is not None: + return self._allvar_gen.next() + else: + return FuzzRequest.from_seed(self.seed, self.dictio.next()) + + def get_baseline(self): + return self._baseline diff --git a/framework/fuzzer/filter.py b/framework/fuzzer/filter.py new file mode 100644 index 00000000..96973ef0 --- /dev/null +++ 
b/framework/fuzzer/filter.py @@ -0,0 +1,178 @@ +from framework.core.myexception import FuzzException +from threading import Thread + +from framework.fuzzer.fuzzobjects import FuzzResult +from framework.utils.myqueue import FuzzQueue + +PYPARSING = True +try: + from pyparsing import Word, Group, oneOf, Optional, Suppress, ZeroOrMore, Literal + from pyparsing import ParseException +except ImportError: + PYPARSING = False + +class FilterQ(FuzzQueue): + def __init__(self, ffilter, queue_out): + FuzzQueue.__init__(self, queue_out) + Thread.__init__(self) + self.setName('filter_thread') + + self.queue_out = queue_out + + if PYPARSING: + element = oneOf("c l w h") + digits = "XB0123456789" + integer = Word( digits )#.setParseAction( self.__convertIntegers ) + elementRef = Group(element + oneOf("= != < > >= <=") + integer) + operator = oneOf("and or") + definition = elementRef + ZeroOrMore( operator + elementRef) + nestedformula = Group(Suppress(Optional(Literal("("))) + definition + Suppress(Optional(Literal(")")))) + self.finalformula = nestedformula + ZeroOrMore( operator + nestedformula) + + elementRef.setParseAction(self.__compute_element) + nestedformula.setParseAction(self.__compute_formula) + self.finalformula.setParseAction(self.__myreduce) + + self.res = None + self.hideparams = ffilter + + if "XXX" in self.hideparams['codes']: + self.hideparams['codes'].append("0") + + self.baseline = None + + def get_name(self): + return 'filter_thread' + + def _cleanup(self): + pass + + def process(self, prio, item): + if item.is_baseline: + self.baseline = self._set_baseline_fuzz(item) + item.is_visible = self.is_visible(item) + self.send(item) + + def _set_baseline_fuzz(self, res): + if "BBB" in self.hideparams['lines']: + self.hideparams['lines'].append(str(res.lines)) + if "BBB" in self.hideparams['codes']: + self.hideparams['codes'].append(str(res.code)) + if "BBB" in self.hideparams['words']: + self.hideparams['words'].append(str(res.words)) + if "BBB" in self.hideparams['chars']: + self.hideparams['chars'].append(str(res.chars)) + + return res + + def __convertIntegers(self, tokens): + return int(tokens[0]) + + def __compute_element(self, tokens): + element, operator, value = tokens[0] + + if value == 'BBB' and self.baseline == None: + raise FuzzException(FuzzException.FATAL, "FilterQ: specify a baseline value when using BBB") + + if element == 'c' and value == 'XXX': + value = 0 + + if value == 'BBB': + if element == 'l': + value = self.baseline.lines + elif element == 'c': + value = self.baseline.code + elif element == 'w': + value = self.baseline.words + elif element == 'h': + value = self.baseline.chars + + test = dict(w=self.res.words, c=self.res.code, l=self.res.lines, h=self.res.chars) + value = int(value) + + if operator == "=": + return test[element] == value + elif operator == "<=": + return test[element] <= value + elif operator == ">=": + return test[element] >= value + elif operator == "<": + return test[element] < value + elif operator == ">": + return test[element] > value + elif operator == "!=": + return test[element] != value + + def __myreduce(self, elements): + first = elements[0] + for i in range(1, len(elements), 2): + if elements[i] == "and": + first = (first and elements[i+1]) + elif elements[i] == "or": + first = (first or elements[i+1]) + + return first + + def __compute_formula(self, tokens): + return self.__myreduce(tokens[0]) + + def is_visible(self, res): + # baseline + if self.baseline and res.is_baseline == True: + return True + + filter_string = 
self.hideparams['filter_string'] + if filter_string and PYPARSING: + self.res = res + try: + return self.finalformula.parseString(filter_string)[0] + except ParseException, e: + raise FuzzException(FuzzException.FATAL, "Incorrect filter expression. It should be composed of: c,l,w,h/and,or/=,<,>,!=,<=,>=") + else: + if self.baseline == None and ('BBB' in self.hideparams['codes'] \ + or 'BBB' in self.hideparams['lines'] \ + or 'BBB' in self.hideparams['words'] \ + or 'BBB' in self.hideparams['chars']): + raise FuzzException(FuzzException.FATAL, "FilterQ: specify a baseline value when using BBB") + + if self.hideparams['codes_show'] is None: + cond1 = True + else: + cond1 = not self.hideparams['codes_show'] + + if self.hideparams['regex_show'] is None: + cond2 = True + else: + cond2 = not self.hideparams['regex_show'] + + if str(res.code) in self.hideparams['codes'] \ + or str(res.lines) in self.hideparams['lines'] \ + or str(res.words) in self.hideparams['words'] \ + or str(res.chars) in self.hideparams['chars']: + cond1 = self.hideparams['codes_show'] + + if self.hideparams['regex']: + if self.hideparams['regex'].search(res.history.fr_content()): + cond2 = self.hideparams['regex_show'] + + return (cond1 and cond2) + +if __name__ == "__main__": + tests = [] + tests.append("(w=200 and w=200) or w=200") + tests.append("(w=400 and w=200) and (w=200 or w=200 or w=000)") + tests.append("(w=200 and l=7) and (h=23)") + tests.append("w=201") + tests.append("w=200") + + class t: + code = 200 + words = 200 + lines = 7 + chars = 23 + + res = t() + + f = FilterQ() + for i in tests: + print "%s := %s" % (str(i), f.is_visible(res, i)) diff --git a/framework/fuzzer/fuzzobjects.py b/framework/fuzzer/fuzzobjects.py new file mode 100644 index 00000000..58893a6b --- /dev/null +++ b/framework/fuzzer/fuzzobjects.py @@ -0,0 +1,573 @@ +import types +import time +import hashlib +import re +from urlparse import urljoin +from threading import Lock + +from externals.reqresp import Request +from framework.core.myexception import FuzzException +from framework.fuzzer.base import BaseFuzzRequest + +class FuzzRequest(BaseFuzzRequest, Request): + def __init__(self): + Request.__init__(self) + self._rlevel = 0 + self._proxy = None + self._allvars = None + self._is_baseline = False + self._fuzz_methods = False + self._description = "" + + # read only methods for accessing HTTP requests information consistenly accross the codebase + + def fr_headers(self): + h = dict(request = {}, response = {}) + + h['request'] = dict(map(lambda x: x.split(":",1), self.getHeaders())) + + if self.response: + h['response'] = dict(self.response.getHeaders()) + + return h + + def fr_parameters(self): + p = dict(get = {}, post = {}) + + p['get'] = dict(map(lambda x: (x.name, x.value), self.getGETVars())) + p['post'] = dict(map(lambda x: (x.name, x.value), self.getPOSTVars())) + + return p + + def fr_cookies(self): + cc = dict(request = {}, response = {}) + + if self['Cookie']: + c = self['Cookie'].split("; ") + if c[0]: + #cc['request'] = dict(map(lambda x: x.split("=", 1), c)) + cc['request'] = dict(map(lambda x:[x[0],x[2]],map(lambda x:x.partition("="), c))) + + if self.response: + c = self.response.getCookie().split("; ") + if c[0]: + #cc['response'] = dict(map(lambda x: x.split("=", 1), c)) + cc['response'] = dict(map(lambda x:[x[0],x[2]],map(lambda x:x.partition("="), c))) + + return cc + + def fr_method(self): + return self.method + + def fr_schema(self): + return self.schema + + def fr_host(self): + return self.getHost() + + def 
fr_url(self): + return self.finalUrl + + def fr_redirect_url(self): + return self.completeUrl + + def fr_content(self): + return self.response.getContent() if self.response else "" + + def fr_code(self): + return self.response.code if self.response else None + + def fr_auth(self): + return self.getAuth() + + def fr_follow(self): + return self.followLocation + + def fr_time(self): + return self.totaltime + + # Info extra that wfuzz needs within an HTTP request + + def _get_baseline(self): + return self._is_baseline + + def _set_baseline(self, bl): + self._is_baseline = bl + + wf_is_baseline = property( _get_baseline, _set_baseline ) + + def wf_allvars_len(self): + if self.wf_allvars == "allvars": + varSET = self.getGETVars() + elif self.wf_allvars == "allpost": + varSET = self.getPOSTVars() + else: + raise FuzzException(FuzzException.FATAL, "Unknown variable set: " + self.wf_allvars) + + return len(varSET) + + def _get_allvars(self): + return self._allvars + + def _set_allvars(self, bl): + if bl is not None and bl not in ['allvars', 'allpost']: + raise FuzzException(FuzzException.FATAL, "Incorrect all parameters brute forcing type specified, correct values are allvars, allpost or allheaders.") + + self._allvars = bl + + wf_allvars = property( _get_allvars, _set_allvars ) + + def _set_rlevel(self, l): + self._rlevel = l + + def _get_rlevel(self): + return self._rlevel + + rlevel = property( _get_rlevel, _set_rlevel ) + + def _set_fuzz_methods(self, l): + self._fuzz_methods = l + + def _get_fuzz_methods(self): + return self._fuzz_methods + + wf_fuzz_methods = property( _get_fuzz_methods, _set_fuzz_methods ) + + def _set_description(self, l): + self._description = l + + def _get_description(self): + return self._description + + wf_description = property( _get_description, _set_description ) + + def _set_proxies(self, l): + self._proxy = l + + def _get_proxies(self): + return self._proxy + + wf_proxy = property( _get_proxies, _set_proxies ) + + # methods wfuzz needs to perform HTTP requests (this might change in the future). 
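A quick, illustrative use of the accessors and properties above (assuming the underlying reqresp.Request parses the URL and Cookie header the way these helpers expect; not taken from the patch):

req = FuzzRequest()
req.setUrl("http://www.site.com/index.php?id=1")
req.addHeader("Cookie", "sessionid=abc123; lang=en")

print req.fr_parameters()['get']    # e.g. {'id': '1'}
print req.fr_cookies()['request']   # e.g. {'sessionid': 'abc123', 'lang': 'en'}
print req.fr_code()                 # None: no response has been attached yet

try:
    req.wf_allvars = "allcookies"   # only 'allvars'/'allpost' pass validation here
except FuzzException, e:
    print e.msg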
+ + def to_http_object(self, c): + return Request.to_pycurl_object(c, self) + + def from_http_object(self, c, bh, bb): + return self.response_from_conn_object(c, bh, bb) + + # methods wfuzz needs for substituing payloads and building dictionaries + + @staticmethod + def from_seed(seed, payload): + marker_regex = re.compile("FUZ\d*Z",re.MULTILINE|re.DOTALL) + fuzz_words = len(marker_regex.findall(seed.getAll())) + if seed.wf_fuzz_methods: + fuzz_words += 1 + + if len(payload) != fuzz_words: + raise FuzzException(FuzzException.FATAL, "FUZZ words and number of payloads do not match!") + + rawReq = seed.getAll() + schema = seed.schema + method, userpass = seed.getAuth() + http_method = None + + newreq = seed.from_copy() + + rawUrl = newreq.completeUrl + + for payload_pos, payload_content in enumerate(payload, start=1): + fuzz_word = "FUZ" + str(payload_pos) + "Z" if payload_pos > 1 else "FUZZ" + + if newreq.wf_description: + newreq.wf_description += " - " + newreq.wf_description += payload_content + + if seed.wf_fuzz_methods and fuzz_word == "FUZZ": + http_method = payload_content + elif method and (userpass.count(fuzz_word)): + userpass = userpass.replace(fuzz_word, payload_content) + elif newreq.completeUrl.count(fuzz_word): + rawUrl = rawUrl.replace(fuzz_word, payload_content) + + # reqresp appends http:// if not indicated in the URL, but if I have a payload with a full URL + # this messes up everything => http://FUZZ and then http://http://asdkjsakd.com + if rawUrl[:11] == 'http://http': + rawUrl = rawUrl[7:] + elif rawReq.count(fuzz_word): + rawReq = rawReq.replace(fuzz_word, payload_content) + else: + raise FuzzException(FuzzException.FATAL, "No %s word!" % fuzz_word) + + newreq.parseRequest(rawReq, schema) + newreq.setUrl(rawUrl) + if http_method: newreq.method = http_method + if method != 'None': newreq.setAuth(method, userpass) + + return newreq + + @staticmethod + def from_baseline(seed): + schema = seed.schema + rawReq = seed.getAll() + + marker_regex = re.compile("FUZ\d*Z{(.*?)}",re.MULTILINE|re.DOTALL) + baseline_payload = marker_regex.findall(rawReq) + + # if there is no marker, there is no baseline request + if len(baseline_payload) == 0: + return None + + # it is not possible to specify baseline value for HTTP method! 
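As a rough worked example of the substitution performed by from_seed above (illustrative; the exact URL handling is delegated to reqresp): given a seed parsed from http://www.site.com/FUZZ/index.php?user=FUZ2Z, the call

fuzzed = FuzzRequest.from_seed(seed, ('admin', '1'))

should yield a request for http://www.site.com/admin/index.php?user=1 with wf_description set to "admin - 1"; a payload whose length does not match the number of FUZnZ markers raises a fatal FuzzException.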
+ if seed.wf_fuzz_methods: + baseline_payload = ['GET'] + baseline_payload + + ## remove baseline marker from seed request + for i in baseline_payload: + rawReq = rawReq.replace("{" + i + "}", '') + + # re-parse seed without baseline markers + seed.parseRequest(rawReq, schema) + if seed.wf_fuzz_methods: seed.method = "FUZZ" + + baseline_req = FuzzRequest.from_seed(seed, baseline_payload) + baseline_req.wf_is_baseline = True + + return baseline_req + + @staticmethod + def from_all_fuzz_request(seed, payload): + # no FUZZ keyword allowed + marker_regex = re.compile("FUZ\d*Z",re.MULTILINE|re.DOTALL) + if len(marker_regex.findall(seed.getAll())) > 0: + raise FuzzException(FuzzException.FATAL, "FUZZ words not allowed when using all parameters brute forcing.") + + # only a fuzz payload is allowed using this technique + if len(payload) > 1: + raise FuzzException(FuzzException.FATAL, "Only one payload is allowed when fuzzing all parameters!") + + if seed.wf_allvars == "allvars": + varSET = seed.getGETVars() + elif seed.wf_allvars == "allpost": + varSET = seed.getPOSTVars() + elif seed.wf_allvars == "allheaders": + varSET = seed.getHeaders() + else: + raise FuzzException(FuzzException.FATAL, "Unknown variable set: " + seed.wf_allvars) + + if len(varSET) == 0: + raise FuzzException(FuzzException.FATAL, "No variables on specified variable set: " + seed.wf_allvars) + + for v in varSET: + variable = v.name + payload_content = payload[0] + copycat = seed.from_copy() + copycat.wf_description = variable + "=" + payload_content + + try: + if seed.wf_allvars == "allvars": + copycat.setVariableGET(variable, payload_content) + elif seed.wf_allvars == "allpost": + copycat.setVariablePOST(variable, payload_content) + elif seed.wf_allvars == "allheaders": + copycat.addHeader(variable, payload_content) + else: + raise FuzzException(FuzzException.FATAL, "Unknown variable set: " + seed.wf_allvars) + except TypeError, e: + raise FuzzException(FuzzException.FATAL, "It is not possible to use all fuzzing with duplicated parameters.") + + yield copycat + + # methods wfuzz needs for creating and converting a fuzz request to other internal objects, ie. 
fuzz result + + @staticmethod + def from_fuzzRes(fuzz_res, new_url = None): + fr = fuzz_res.history.from_copy() + + fr.wf_description = fuzz_res.description + fr.rlevel = fuzz_res.rlevel + + if new_url: fr.setUrl(new_url) + + return fr + + def from_copy(self): + newreq = FuzzRequest() + + newreq.rlevel = self.rlevel + newreq.wf_description = self.wf_description + newreq.wf_proxy = self.wf_proxy + newreq.wf_is_baseline = self.wf_is_baseline + newreq.wf_allvars = self.wf_allvars + newreq.wf_fuzz_methods = self.wf_fuzz_methods + + + for k,v in self.fr_headers()['request'].items(): + newreq.addHeader(k, v) + + pp = self.fr_parameters()['post'] + if pp: + newreq.setPostData("&".join(["=".join([n,v]) if v is not None else n for n,v in pp.items()])) + + newreq.setFollowLocation(self.followLocation) + m, up = self.getAuth() + newreq.setAuth(m, up) + newreq.setUrl(self.finalUrl) + newreq.proxytype = self.proxytype + newreq.totaltime = self.totaltime + newreq.schema = self.schema + + if self.wf_fuzz_methods: + newreq.method = "FUZZ" + else: + newreq.method = self.method + + return newreq + + @staticmethod + def from_parse_options(options): + fr = FuzzRequest() + + fr.rlevel = 1 + fr.setUrl(options['url']) + fr.wf_fuzz_methods = options['fuzz_methods'] + + if options['auth'][0] is not None: + fr.setAuth(options['auth'][0],options['auth'][1]) + + if options['follow']: + fr.setFollowLocation(options['follow']) + + if options['postdata']: + fr.setPostData(options['postdata']) + + if options['head']: + fr.method="HEAD" + + if options['cookie']: + fr.addHeader("Cookie", options['cookie']) + + if options['extraheaders']: + hh = options['extraheaders'].split(",") + for x in hh: + splitted = x.partition(":") + if splitted[1] != ":": + raise FuzzException(FuzzException.FATAL, "Wrong header specified, it should be in the format \"name: value\".") + fr.addHeader(splitted[0], splitted[2]) + + if options['allvars']: + fr.wf_allvars = options['allvars'] + + return fr + +class FuzzStats: + def __init__(self): + self.mutex = Lock() + + self.url = "" + self.seed = None + + self.total_req = 0 + self._pending_fuzz = 0 + self._pending_seeds = 0 + self._processed = 0 + self._backfeed = 0 + self._filtered = 0 + + self._totaltime = 0 + self.__starttime = 0 + + self._cancel = False + + @staticmethod + def from_requestGenerator(rg): + tmp_stats = FuzzStats() + + tmp_stats.url = rg.seed.completeUrl + tmp_stats.total_req = rg.count() + tmp_stats.seed = FuzzResult.from_fuzzReq(rg.seed, -1) + + return tmp_stats + + def get_stats(self): + return { + "url": self.url, + "total": self.total_req, + + "backfed": self.backfeed, + "Processed": self.processed, + "Pending": self.pending_fuzz, + "filtered": self.filtered, + + "Pending_seeds": self.pending_seeds, + + "totaltime": self.totaltime, + } + + def get_cancelled(self): + with self.mutex: + return self._cancel + + def set_cancelled(self, someValue): + with self.mutex: + self._cancel = someValue + + cancelled = property( get_cancelled, set_cancelled ) + + def get_pend_fuzz(self): + with self.mutex: + return self._pending_fuzz + + def set_pend_fuzz(self, someValue): + with self.mutex: + self._pending_fuzz = someValue + + pending_fuzz = property( get_pend_fuzz, set_pend_fuzz ) + + def get_filtered(self): + with self.mutex: + return self._filtered + + def set_filtered(self, someValue): + with self.mutex: + self._filtered = someValue + + filtered = property( get_filtered, set_filtered ) + + def get_backfeed(self): + with self.mutex: + return self._backfeed + + def set_backfeed(self, 
someValue): + with self.mutex: + self._backfeed = someValue + + backfeed = property( get_backfeed, set_backfeed ) + + def get_processed(self): + with self.mutex: + return self._processed + + def set_processed(self, someValue): + with self.mutex: + self._processed = someValue + + processed = property( get_processed, set_processed ) + + def get_pend_seeds(self): + with self.mutex: + return self._pending_seeds + + def set_pend_seeds(self, someValue): + with self.mutex: + self._pending_seeds = someValue + + pending_seeds = property( get_pend_seeds, set_pend_seeds ) + + def get_total_time(self): + with self.mutex: + return self._totaltime + + def set_total_time(self, someValue): + with self.mutex: + self._totaltime = someValue + + totaltime = property( get_total_time, set_total_time ) + + def mark_start(self): + with self.mutex: + self.__starttime = time.time() + + def mark_end(self): + self.totaltime = time.time() - self.__starttime + + +class FuzzResult: + def __init__(self, nres): + self.is_visible = True + self.is_baseline = False + + self.nres = nres + self.timer = 0 + self.rlevel = 1 + + self.exception = None + self.description = "" + + self.url = "" + + self.code = 0 + self.chars = 0 + self.lines = 0 + self.words = 0 + self.md5 = "" + + self.history = None + + self.plugins_res = [] + self.plugins_backfeed = [] + + @staticmethod + def from_fuzzReq(req, nres = -1, exception = None): + fr = FuzzResult(nres) + + fr.nres = nres + if req.fr_content(): + m = hashlib.md5() + m.update(req.fr_content()) + fr.md5 = m.hexdigest() + + fr.chars = len(req.fr_content()) + fr.lines = req.fr_content().count("\n") + fr.words = len(re.findall("\S+",req.fr_content())) + + fr.code = 0 if req.fr_code() is None else int(req.fr_code()) + fr.url = req.fr_url() + fr.description = req.wf_description + fr.timer = req.fr_time() + fr.rlevel = req.rlevel + + fr.history = req + fr.is_baseline = req.wf_is_baseline + + if exception: + fr.code = 0 + fr.exception = exception + fr.description = exception.msg + + return fr + + def is_path(self): + if self.code == 200 and self.url[-1] == '/': + return True + elif self.code >= 300 and self.code < 400: + if "Location" in self.history.fr_headers()['response'] and self.history.fr_headers()['response']["Location"][-1]=='/': + return True + elif self.code == 401: + if self.url[-1] == '/': + return True + + return False + + def to_new_seed(self): + seed = FuzzRequest.from_fuzzRes(self, self._recursive_url()) + seed.rlevel += 1 + + return seed + + def _recursive_url(self): + if self.code >= 300 and self.code < 400 and "Location" in self.history.fr_headers()['response']: + new_url = self.history.fr_headers()['response']["Location"] + if not new_url[-1] == '/': new_url += "/" + # taking into consideration redirections to /xxx/ without full URL + new_url = urljoin(self.url, new_url) + elif self.code == 401 or self.code == 200: + new_url = self.url + if not self.url[-1] == '/': new_url = "/" + else: + raise Exception, "Error generating seed from fuzz res" + + return new_url + "FUZZ" + diff --git a/framework/fuzzer/myhttp.py b/framework/fuzzer/myhttp.py new file mode 100644 index 00000000..e0a27fa1 --- /dev/null +++ b/framework/fuzzer/myhttp.py @@ -0,0 +1,176 @@ +from Queue import Queue +from threading import Thread +from threading import Lock +from threading import Event +from cStringIO import StringIO + +from externals.reqresp.exceptions import ReqRespException + +from framework.core.myexception import FuzzException +from framework.utils.myqueue import FuzzQueue +from 
framework.fuzzer.fuzzobjects import FuzzResult + +import pycurl + +class HttpQueue(FuzzQueue): + HTTPAUTH_BASIC, HTTPAUTH_NTLM, HTTPAUTH_DIGEST = ('basic', 'ntlm', 'digest') + + def __init__(self, options, q_out): + FuzzQueue.__init__(self, q_out, options.get("max_concurrent") * 5) + + self.options = options + + self.processed = 0 + + self.exit_job = False + self.mutex_multi = Lock() + self.mutex_stats = Lock() + + self.queue_out = q_out + + # Connection pool + self.m = None + self.freelist = Queue() + self.create_pool(options.get("max_concurrent")) + + th2 = Thread(target=self.__read_multi_stack) + th2.setName('__read_multi_stack') + th2.start() + + self.pause = Event() + self.pause.set() + + self._proxies = None + if options.get("proxy_list"): + self._proxies = self.__get_next_proxy(options.get("proxy_list")) + + def get_name(self): + return 'HttpQueue' + + def job_stats(self): + with self.mutex_stats: + dic = { + "http_Processed": self.processed, + "http_Pending": self.qsize(), + "http_Idle Workers": self.freelist.qsize() + } + return dic + + # Pycurl management + def create_pool(self, num_conn): + # Pre-allocate a list of curl objects + self.m = pycurl.CurlMulti() + self.m.handles = [] + + for i in range(num_conn): + c = pycurl.Curl() + self.m.handles.append(c) + self.freelist.put(c) + + def _cleanup(self): + self.exit_job = True + + def __get_next_proxy(self, proxy_list): + i = 0 + while 1: + yield proxy_list[i] + i += 1 + i = i % len(proxy_list) + + def _set_proxy(self, c, freq): + ip, port, ptype = self._proxies.next() + + freq.wf_proxy = (("%s:%s" % (ip, port)), ptype) + + c.setopt(pycurl.PROXY, "%s:%s" % (ip, port)) + if ptype == "SOCKS5": + c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5) + elif ptype == "SOCKS4": + c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4) + elif ptype == "HTML": + pass + else: + raise FuzzException(FuzzException.FATAL, "Bad proxy type specified, correct values are HTML, SOCKS4 or SOCKS5.") + + return c + + def process(self, prio, obj): + self.pause.wait() + c = obj.to_http_object(self.freelist.get()) + if self._proxies: c = self._set_proxy(c, obj) + + c.response_queue = ((StringIO(), StringIO(), obj)) + c.setopt(pycurl.WRITEFUNCTION, c.response_queue[0].write) + c.setopt(pycurl.HEADERFUNCTION, c.response_queue[1].write) + + with self.mutex_multi: + self.m.add_handle(c) + + def __read_multi_stack(self): + # Check for curl objects which have terminated, and add them to the freelist + while not self.exit_job: + with self.mutex_multi: + while not self.exit_job: + ret, num_handles = self.m.perform() + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + + num_q, ok_list, err_list = self.m.info_read() + for c in ok_list: + # Parse response + buff_body, buff_header, req = c.response_queue + req.from_http_object(c, buff_header.getvalue(), buff_body.getvalue()) + + self.send(FuzzResult.from_fuzzReq(req)) + + self.m.remove_handle(c) + self.freelist.put(c) + + with self.mutex_stats: + self.processed += 1 + + for c, errno, errmsg in err_list: + buff_body, buff_header, req = c.response_queue + + req.totaltime = 0 + self.m.remove_handle(c) + self.freelist.put(c) + + # Usual suspects: + + #Exception in perform (35, 'error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table') + #Exception in perform (18, 'SSL read: error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table, errno 11') + #Exception in perform (28, 'Connection time-out') + #Exception in perform (7, "couldn't connect to host") + #Exception in 
perform (6, "Couldn't resolve host 'www.xxx.com'") + #(28, 'Operation timed out after 20000 milliseconds with 0 bytes received') + #Exception in perform (28, 'SSL connection timeout') + #5 Couldn't resolve proxy 'aaa' + + err_number = ReqRespException.FATAL + if errno == 35: + err_number = ReqRespException.SSL + elif errno == 18: + err_number = ReqRespException.SSL + elif errno == 28: + err_number = ReqRespException.TIMEOUT + elif errno == 7: + err_number = ReqRespException.CONNECT_HOST + elif errno == 6: + err_number = ReqRespException.RESOLVE_HOST + elif errno == 5: + err_number = ReqRespException.RESOLVE_PROXY + + e = ReqRespException(err_number, "Pycurl error %d: %s" % (errno, errmsg)) + self.send(FuzzResult.from_fuzzReq(req, exception=e)) + + if not self.options.get("scanmode"): + self._throw(e) + + with self.mutex_stats: + self.processed += 1 + + # cleanup multi stack + for c in self.m.handles: + c.close() + self.m.close() diff --git a/framework/plugins/__init__.py b/framework/plugins/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/plugins/api.py b/framework/plugins/api.py new file mode 100644 index 00000000..1a2a68f6 --- /dev/null +++ b/framework/plugins/api.py @@ -0,0 +1,127 @@ +from framework.plugins.pluginobjects import PluginResult +from framework.plugins.pluginobjects import PluginRequest +from framework.core.myexception import FuzzException +from framework.core.facade import Facade + +import os +import urlparse +import urllib2 +import json + +# Util methods when processing fuzz results + +def url_filename(fuzzresult): + u = urlparse.urlsplit(fuzzresult.url).path.split('/')[-1:][0] + + return u + +def url_same_domain(url1, url2): + return url_domain(url1) == url_domain(url2) + +def url_domain(url): + return '.'.join(urlparse.urlparse(url).netloc.split(".")[-2:]) + +def url_filename_ext(url): + path = urlparse.urlparse(url).path + ext = os.path.splitext(path)[1] + + return ext + +# Util methods for accessing search results +def search_bing(dork, key = None, raw = False): + if key is None: + key = Facade().sett.get('plugins', 'bing_apikey') + + if not key: + raise FuzzException(FuzzException.FATAL, "An api Bing key is needed. Please chek wfuzz.ini.") + + # some code taken from http://www.securitybydefault.com/2014/07/search2auditpy-deja-que-bing-haga-el.html?utm_source=feedburner&utm_medium=feed&utm_campaign=Feed%3A+SecurityByDefault+%28Security+By+Default%29 + user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)' + creds = (':%s' % key).encode('base64')[:-1] + auth = 'Basic %s' % creds + + # temporary solution, wf should have a process performing http requests. even plugins might need this. + + try: + request = urllib2.Request('https://api.datamarket.azure.com/Data.ashx/Bing/Search/Composite?Sources=%27web%27&Query=%27'+dork+'%27&$format=json') + request.add_header('Authorization', auth) + request.add_header('User-Agent', user_agent) + requestor = urllib2.build_opener() + result = requestor.open(request) + except Exception, e: + raise FuzzException(FuzzException.FATAL, "Error when retrieving Bing API results: %s." 
% e.msg) + + results = json.loads(result.read()) + #test results = {u'd': {u'results': [{u'Web': [{u'Description': u'Diario de informaci\xf3n general de USA, noticias de \xfaltima hora de USA, el mundo, local, deportes, noticias curiosas y m\xe1s', u'Title': u'20minutos.com - El medio social - \xdaltima hora en USA y el ...', u'Url': u'http://www.20minutos.com/', u'__metadata': {u'type': u'WebResult', u'uri': u"https://api.datamarket.azure.com/Data.ashx/Bing/Search/ExpandableSearchResultSet(guid'b670a6b6-6ae7-4830-ad6f-83b525d6266d')/Web?$skip=0&$top=1"}, u'DisplayUrl': u'www.20minutos.com', u'ID': u'546995b5-587a-4618-984d-93bc5041e067'}, {u'Description': u'Informaci\xf3n, noticias y resultados de deportes: F\xfatbol, Baloncesto, NBA, Beisbol, F\xf3rmula 1, MotoGP, Tenis y m\xe1s en 20minutos.com', u'Title': u'Noticias deportivas - 20minutos.com', u'Url': u'http://www.20minutos.com/deportes/', u'__metadata': {u'type': u'WebResult', u'uri': u"https://api.datamarket.azure.com/Data.ashx/Bing/Search/ExpandableSearchResultSet(guid'b670a6b6-6ae7-4830-ad6f-83b525d6266d')/Web?$skip=1&$top=1"}, u'DisplayUrl': u'www.20minutos.com/deportes', u'ID': u'2ff2cd36-eece-4810-9b00-cba7d5ecfa47'}], u'VideoTotal': u'', u'RelatedSearch': [], u'Image': [], u'__metadata': {u'type': u'ExpandableSearchResult', u'uri': u"https://api.datamarket.azure.com/Data.ashx/Bing/Search/Composite?Sources='web'&Query='ip:193.148.34.26'&$skip=0&$top=1"}, u'ImageOffset': u'', u'AlterationOverrideQuery': u'', u'ImageTotal': u'', u'WebTotal': u'20', u'SpellingSuggestionsTotal': u'', u'WebOffset': u'0', u'Video': [], u'News': [], u'AlteredQuery': u'', u'SpellingSuggestions': [], u'VideoOffset': u'', u'NewsTotal': u'', u'ID': u'b670a6b6-6ae7-4830-ad6f-83b525d6266d', u'NewsOffset': u''}]}} + + if raw: + return results + else: + return results['d']['results'][0]['Web'] + +class BasePlugin(): + def __init__(self): + self.results_queue = None + self.base_fuzz_res = None + + def run(self, fuzzresult, control_queue, results_queue): + try: + self.results_queue = results_queue + self.base_fuzz_res = fuzzresult + self.process(fuzzresult) + except Exception, e: + plres = PluginResult() + plres.source = "$$exception$$" + plres.issue = "Exception within plugin %s: %s" % (self.name, str(e)) + results_queue.put(plres) + finally: + control_queue.get() + control_queue.task_done() + return + + def process(self, fuzzresult): + ''' + This is were the plugin processing is done. Any wfuzz plugin must implement this method, do its job with the fuzzresult received and: + - queue_url: if it is a discovery plugin enqueing more HTTP request that at some point will generate more results + - add_result: Add information about the obtained results after the processing with an accurate description + + A kbase (get_kbase, has_kbase, add_kbase) is shared between all plugins. this can be used to store and retrieve relevant "collaborative" information. 
+ ''' + raise NotImplemented + + def add_result(self, issue): + plres = PluginResult() + plres.source = self.name + plres.issue = issue + + self.results_queue.put(plres) + + def queue_raw_request(self, raw): + self.results_queue.put(raw) + + def queue_url(self, url): + self.results_queue.put(PluginRequest.from_fuzzRes(self.base_fuzz_res, url, self.name)) + + def get_kbase(self, key): + v = self.kbase.get(key) + if not v: + raise FuzzException(FuzzException.FATAL, "Key not in kbase") + return v + + def has_kbase(self, key): + return self.kbase.has(key) + + def add_kbase(self, key, value): + self.kbase.add(key, value) + +# Plugins specializations with common methods useful for their own type + +class DiscoveryPlugin(BasePlugin): + def __init__(self): + self.black_list = Facade().sett.get('plugins', 'file_bl').split(",") + + if self.has_kbase("discovery.bl"): + self.black_list = self.get_kbase("discovery.bl")[0].split("-") + + def blacklisted_extension(self, url): + return url_filename_ext(url) in self.black_list diff --git a/framework/plugins/jobs.py b/framework/plugins/jobs.py new file mode 100644 index 00000000..160ef3e6 --- /dev/null +++ b/framework/plugins/jobs.py @@ -0,0 +1,136 @@ +import threading +from Queue import Queue + +from framework.fuzzer.fuzzobjects import FuzzResult +from framework.fuzzer.fuzzobjects import FuzzRequest +from framework.plugins.pluginobjects import PluginResult +from framework.plugins.pluginobjects import PluginRequest +from framework.core.myexception import FuzzException +from framework.utils.myqueue import FuzzQueue +from framework.utils.myqueue import FuzzListQueue + +class RoundRobin(FuzzListQueue): + def __init__(self, queue_out): + FuzzListQueue.__init__(self, queue_out) + self.next_route = self._get_next_route() + + def get_name(self): + return 'RoundRobin' + + def _cleanup(self): + pass + + def send(self, item): + self.next_route.next().put(item) + + def _get_next_route(self): + i = 0 + while 1: + yield self.queue_out[i] + i += 1 + i = i % len(self.queue_out) + + def process(self, prio, item): + self.send(item) + +class JobMan(FuzzQueue): + def __init__(self, selected_plugins, cache, queue_out): + FuzzQueue.__init__(self, queue_out) + self.__walking_threads = Queue(20) + self.selected_plugins = selected_plugins + self.cache = cache + + def get_name(self): + return 'Jobman' + + def _cleanup(self): + pass + + # ------------------------------------------------ + # threading + # ------------------------------------------------ + def process(self, prio, res): + # process request through plugins + if res.is_visible and not res.exception: + if self.cache.update_cache(res.history, "processed"): + + plugins_res_queue = Queue() + + for plugin_class in self.selected_plugins: + try: + pl = plugin_class() + if not pl.validate(res): + continue + th = threading.Thread(target = pl.run, kwargs={"fuzzresult": res, "control_queue": self.__walking_threads, "results_queue": plugins_res_queue}) + except Exception, e: + raise FuzzException(FuzzException.FATAL, "Error initialising plugin %s: %s " % (plugin_class.name, str(e))) + self.__walking_threads.put(th) + th.start() + + self.__walking_threads.join() + + + while not plugins_res_queue.empty(): + item = plugins_res_queue.get() + if isinstance(item, PluginResult): + if item.source == "$$exception$$": + self._throw(FuzzException(FuzzException.FATAL, item.issue)) + res.plugins_res.append(item) + elif isinstance(item, PluginRequest): + if self.cache.update_cache(item.request, "backfeed"): + res.plugins_backfeed.append(item) + + 
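For context, the PluginResult and PluginRequest objects drained here are produced by scripts written against the BasePlugin API in framework/plugins/api.py. A minimal, hypothetical script plugin (class name and attribute values are illustrative; moduleman registration details are omitted) might look like:

from framework.plugins.api import BasePlugin

class server_header(BasePlugin):
    # class attributes the plugin loader and JobMan expect (values are examples)
    name = "server_header"
    description = "Reports the Server response header"
    category = ["default"]
    priority = 99

    def validate(self, fuzz_result):
        # JobMan calls validate() before spawning the plugin thread
        return fuzz_result.code > 0

    def process(self, fuzz_result):
        headers = fuzz_result.history.fr_headers()['response']
        if "Server" in headers:
            self.add_result("Server: %s" % headers["Server"])

add_result() puts a PluginResult on the results queue drained above; queue_url() would instead enqueue a PluginRequest for back-feeding.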
# add result to results queue + self.send(res) + +class ProcessorQ(FuzzQueue): + def __init__(self, max_rlevel, stats, queue_out): + FuzzQueue.__init__(self, queue_out) + + self.stats = stats + self.max_rlevel = max_rlevel + + def get_name(self): + return 'ProcessorQ' + + def _cleanup(self): + pass + + def process(self, prio, fuzz_res): + # Getting results from plugins or directly from http if not activated + enq_item = 0 + plugin_name = "" + + # Check for plugins new enqueued requests + while fuzz_res.plugins_backfeed: + plg_backfeed = fuzz_res.plugins_backfeed.pop() + plugin_name = plg_backfeed.source + + self.stats.backfeed += 1 + self.stats.pending_fuzz += 1 + self.send(plg_backfeed) + enq_item += 1 + + if enq_item > 0: + plres = PluginResult() + plres.source = "Backfeed" + fuzz_res.plugins_res.append(plres) + plres.issue = "Plugin %s enqueued %d more requests (rlevel=%d)" % (plugin_name, enq_item, fuzz_res.rlevel) + + # check if recursion is needed + if self.max_rlevel >= fuzz_res.rlevel and fuzz_res.is_path(): + self.send_new_seed(fuzz_res) + + # send new result + self.send(fuzz_res) + + def send_new_seed(self, res): + # Little hack to output that the result generates a new recursion seed + plres = PluginResult() + plres.source = "Recursion" + plres.issue = "Enqueued response for recursion (level=%d)" % (res.rlevel) + res.plugins_res.append(plres) + + # send new seed + self.stats.pending_seeds += 1 + self.send(res.to_new_seed()) diff --git a/framework/plugins/pluginobjects.py b/framework/plugins/pluginobjects.py new file mode 100644 index 00000000..c96d547b --- /dev/null +++ b/framework/plugins/pluginobjects.py @@ -0,0 +1,26 @@ +from framework.fuzzer.fuzzobjects import FuzzRequest + +class PluginResult: + def __init__(self): + self.source = "" + self.issue = "" + +class PluginRequest(): + def __init__(self): + self.source = "" + self.request = None + self.rlevel = 0 + + @staticmethod + def from_fuzzRes(res, url, source): + fr = FuzzRequest.from_fuzzRes(res, str(url)) + fr.wf_description = fr.path + fr.rlevel = res.rlevel + 1 + + plreq = PluginRequest() + plreq.source = source + plreq.request = fr + plreq.rlevel = res.rlevel + 1 + + return plreq + diff --git a/framework/ui/__init__.py b/framework/ui/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/ui/console/__init__.py b/framework/ui/console/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/ui/console/clparser.py b/framework/ui/console/clparser.py new file mode 100644 index 00000000..c38604a2 --- /dev/null +++ b/framework/ui/console/clparser.py @@ -0,0 +1,358 @@ +import sys +import getopt +import time +import re +from collections import defaultdict + +from framework.fuzzer.dictio import dictionary +from framework.fuzzer.fuzzobjects import FuzzRequest +from framework.fuzzer.filter import PYPARSING +from framework.core.facade import Facade +from framework.core.facade import FuzzSessionOptions +from framework.fuzzer.dictio import requestGenerator +from framework.core.myexception import FuzzException +from framework.ui.console.common import help_banner +from framework.ui.console.common import usage +from framework.ui.console.common import brief_usage +from framework.ui.console.common import version +from framework.ui.console.output import table_print + +import plugins.encoders +import plugins.iterations + + +class CLParser: + def __init__(self, argv): + self.argv = argv + + def show_brief_usage(self): + print help_banner + print brief_usage + + def show_usage(self): + print 
help_banner + print usage + + def show_plugins_help(self, registrant, cols=3, category="$all$"): + print "\nAvailable %s:\n" % registrant + table_print(map(lambda x: x[cols:], Facade().proxy(registrant).get_plugins_ext(category))) + sys.exit(0) + + def parse_cl(self): + options = FuzzSessionOptions() + + # Usage and command line help + try: + opts, args = getopt.getopt(self.argv[1:], "hAZIXvcb:e:R:d:z:r:f:t:w:V:H:m:o:s:p:w:",['sc=','sh=','sl=','sw=','ss=','hc=','hh=','hl=','hw=','hs=','ntlm=','basic=','digest=','follow','script-help=','script=','script-args=','filter=','interact','help','version']) + optsd = defaultdict(list) + for i,j in opts: + optsd[i].append(j) + + self._parse_help_opt(optsd) + + if len(args) == 0: + raise FuzzException(FuzzException.FATAL, "You must specify a payload and a URL") + + url = args[0] + + self._check_options(optsd) + self._parse_options(optsd, options) + options.set("filter_params", self._parse_filters(optsd)) + options.set("genreq", requestGenerator(self._parse_seed(url, optsd), self._parse_payload(optsd))) + + return options + except FuzzException, e: + self.show_brief_usage() + #self.show_usage() + raise e + except ValueError: + self.show_brief_usage() + raise FuzzException(FuzzException.FATAL, "Incorrect options, please check help.") + except getopt.GetoptError, qw: + self.show_brief_usage() + #self.show_usage() + raise FuzzException(FuzzException.FATAL, "%s." % str(qw)) + + def _parse_help_opt(self, optsd): + if "--version" in optsd: + print version + sys.exit(0) + + if "--help" in optsd or "-h" in optsd: + self.show_usage() + sys.exit(0) + + # Extensions help + if "--script-help" in optsd: + script_string = optsd["--script-help"][0] + if script_string == "": + script_string = "$all$" + + self.show_plugins_help("parsers", 2, script_string) + + if "-e" in optsd: + if "payloads" in optsd["-e"]: + self.show_plugins_help("payloads") + elif "encoders" in optsd["-e"]: + self.show_plugins_help("encoders", 2) + elif "iterators" in optsd["-e"]: + self.show_plugins_help("iterators") + elif "printers" in optsd["-e"]: + self.show_plugins_help("printers") + elif "scripts" in optsd["-e"]: + self.show_plugins_help("parsers", 2) + else: + raise FuzzException(FuzzException.FATAL, "Unknown category. Valid values are: payloads, encoders, iterators, printers or scripts.") + + if "-o" in optsd: + if "help" in optsd["-o"]: + self.show_plugins_help("printers") + if "-m" in optsd: + if "help" in optsd["-m"]: + self.show_plugins_help("iterators") + if "-z" in optsd: + if "help" in optsd["-z"]: + self.show_plugins_help("payloads") + + + def _check_options(self, optsd): + if not "-z" in optsd.keys() and not "-w" in optsd.keys(): + raise FuzzException(FuzzException.FATAL, "Bad usage: You must specify a payload.") + + if "--filter" in optsd.keys() and filter(lambda x: x in optsd.keys(), ["--sc","--ss","--sh","--sl","--sw","--hc","--hs","--hh","--hl","--hw"]): + raise FuzzException(FuzzException.FATAL, "Bad usage: Advanced and filter flags are mutually exclusive. Only one could be specified.") + + # Check for repeated flags + l = ["--hc", "--hw", "--hl", "--hh", "--hs", "--sc", "--sw", "--sl", "--sh", "--ss", "--script", "--script-args"] + if [i for i in l if i in optsd and len(optsd[i]) > 1]: + raise FuzzException(FuzzException.FATAL, "Bad usage: Only one filter could be specified at the same time.") + + #HEAD with POST parameters + if "-d" in optsd.keys() and "-I" in optsd.keys(): + raise FuzzException(FuzzException.FATAL, "Bad usage: HEAD with POST parameters? 
Does it makes sense?") + + #-A and script not allowed at the same time + if "--script" in optsd.keys() and "-A" in optsd.keys(): + raise FuzzException(FuzzException.FATAL, "Bad usage: --scripts and -A are incompatible options, -A already defines --script=default.") + + + def _parse_filters(self, optsd): + filter_params = dict( + active = False, + regex_show = None, + codes_show = None, + codes = [], + words = [], + lines = [], + chars = [], + regex = None, + filter_string = "" + ) + + + if "--filter" in optsd: + if not PYPARSING: + raise FuzzException(FuzzException.FATAL, "--filter switch needs pyparsing module.") + filter_params['filter_string'] = optsd["--filter"][0] + + if "--hc" in optsd: + filter_params['codes'] = optsd["--hc"][0].split(",") + if "--hw" in optsd: + filter_params['words'] = optsd["--hw"][0].split(",") + if "--hl" in optsd: + filter_params['lines'] = optsd["--hl"][0].split(",") + if "--hh" in optsd: + filter_params['chars'] = optsd["--hh"][0].split(",") + if "--hs" in optsd: + filter_params['regex'] = re.compile(optsd["--hs"][0], re.MULTILINE|re.DOTALL) + + if filter(lambda x: x in optsd, ["--ss"]): + filter_params['regex_show'] = True + elif filter(lambda x: x in optsd, ["--hs"]): + filter_params['regex_show'] = False + + if filter(lambda x: x in optsd, ["--sc", "--sw", "--sh", "--sl"]): + filter_params['codes_show'] = True + elif filter(lambda x: x in optsd, ["--hc", "--hw", "--hh", "--hl"]): + filter_params['codes_show'] = False + + if "--sc" in optsd: + filter_params['codes'] = optsd["--sc"][0].split(",") + if "--sw" in optsd: + filter_params['words'] = optsd["--sw"][0].split(",") + if "--sl" in optsd: + filter_params['lines'] = optsd["--sl"][0].split(",") + if "--sh" in optsd: + filter_params['chars'] = optsd["--sh"][0].split(",") + if "--ss" in optsd: + filter_params['regex'] = re.compile(optsd["--ss"][0], re.MULTILINE|re.DOTALL) + + if filter_params['regex_show'] is not None or filter_params['codes_show'] is not None or filter_params['filter_string'] != "": + filter_params['active'] = True + + return filter_params + + def _parse_payload(self, optsd): + selected_dic = [] + if "-z" in optsd: + for i in optsd["-z"]: + vals = i.split(",") + t, par = vals[:2] + p = Facade().get_payload(t)(par) + + l = [] + if len(vals) == 3: + encoding = vals[2] + for i in encoding.split("-"): + if i.find('@') > 0: + l.append(plugins.encoders.pencoder_multiple([Facade().get_encoder(ii) for ii in i.split("@")]).encode) + else: + l += map(lambda x: x().encode, Facade().proxy("encoders").get_plugins(i)) + else: + l = [Facade().get_encoder('none').encode] + + d = dictionary(p, l) + selected_dic.append(d) + + # Alias por "-z file,Wordlist" + if "-w" in optsd: + for i in optsd["-w"]: + vals = i.split(",") + f, = vals[:1] + p = Facade().get_payload("file")(f) + + l = [] + if len(vals) == 2: + encoding = vals[1] + for i in encoding.split("-"): + if i.find('@') > 0: + l.append(plugins.encoders.pencoder_multiple([Facade().get_encoder(ii) for ii in i.split("@")]).encode) + else: + l += map(lambda x: x().encode, Facade().proxy("encoders").get_plugins(i)) + else: + l = [Facade().get_encoder('none').encode] + + d = dictionary(p, l) + selected_dic.append(d) + + iterat_tool = plugins.iterations.piterator_void + if "-m" in optsd: + iterat_tool = Facade().get_iterator(optsd['-m'][0]) + elif len(selected_dic) > 0: + iterat_tool = Facade().get_iterator("product") + + return iterat_tool(*selected_dic) + + def _parse_seed(self, url, optsd): + + options = dict( + url = url, + fuzz_methods = False, + auth = 
(None, None), + follow = False, + head = False, + postdata = None, + extraheaders = None, + cookie = None, + allvars = None, + ) + + options['url'] = url + + if "-X" in optsd: + options['fuzz_methods'] = True + + if "--basic" in optsd: + options['auth'] = ("basic", optsd["--basic"][0]) + + if "--digest" in optsd: + options['auth'] = ("digest", optsd["--digest"][0]) + + if "--ntlm" in optsd: + options['auth'] = ("ntlm", optsd["--ntlm"][0]) + + if "--follow" in optsd: + options['follow'] = True + + if "-I" in optsd: + options['head'] = "HEAD" + + if "-d" in optsd: + options['postdata'] = optsd["-d"][0] + + if "-b" in optsd: + options['cookie'] = optsd["-b"][0] + + if "-H" in optsd: + options['extraheaders'] = str(optsd["-H"][0]) + + if "-V" in optsd: + varset = str(optsd["-V"][0]) + if varset not in ['allvars','allpost','allheaders']: + raise FuzzException(FuzzException.FATAL, "Incorrect all parameters brute forcing type specified, correct values are allvars,allpost or allheaders.") + + options['allvars'] = varset + + return FuzzRequest.from_parse_options(options) + + def _parse_options(self, optsd, options): + + if "-p" in optsd: + proxy = [] + + for p in optsd["-p"][0].split('-'): + vals = p.split(":") + + if len(vals) == 2: + proxy.append((vals[0], vals[1], "HTML")) + elif len(vals) == 3: + if vals[2] not in ("SOCKS5","SOCKS4","HTML"): + raise FuzzException(FuzzException.FATAL, "Bad proxy type specified, correct values are HTML, SOCKS4 or SOCKS5.") + proxy.append((vals[0], vals[1], vals[2])) + else: + raise FuzzException(FuzzException.FATAL, "Bad proxy parameter specified.") + + options.set('proxy_list', proxy) + + if "-R" in optsd: + options.set("rlevel", int(optsd["-R"][0])) + + options.set("printer_tool", "default") + + if "-v" in optsd: + options.set("printer_tool", "verbose") + + if "-c" in optsd: + Facade().proxy("printers").kbase.add("colour", True) + + if "-A" in optsd: + options.set("printer_tool", "verbose") + Facade().proxy("printers").kbase.add("colour", True) + + options.set("script_string", "default") + + options.set("scanmode", "-Z" in optsd) + + if "-o" in optsd: + options.set("printer_tool", optsd['-o'][0]) + + if "--script" in optsd: + options.set("script_string", "default" if optsd["--script"][0] == "" else optsd["--script"][0]) + + if "--script-args" in optsd: + vals = optsd["--script-args"][0].split(",") + for i in vals: + k, v = i.split("=", 1) + Facade().proxy("parsers").kbase.add(k, v) + + options.set("interactive", "--interact" in optsd) + + # HTTP options + + if "-s" in optsd: + options.set("sleeper", float(optsd["-s"][0])) + + if "-t" in optsd: + options.set("max_concurrent", int(optsd["-t"][0])) + + diff --git a/framework/ui/console/common.py b/framework/ui/console/common.py new file mode 100644 index 00000000..6a9d5049 --- /dev/null +++ b/framework/ui/console/common.py @@ -0,0 +1,103 @@ +import sys +from framework.core.facade import version + + +exec_banner = '''********************************************************\r +* Wfuzz %s - The Web Bruteforcer *\r +********************************************************\r\n''' % version + +help_banner = '''******************************************************** +* Wfuzz %s - The Web Bruteforcer * +* * +* Version up to 1.4c coded by: * +* Christian Martorella (cmartorella@edge-security.com) * +* Carlos del ojo (deepbit@gmail.com) * +* * +* Version 1.4d to %s coded by: * +* Xavier Mendez (xmendez@edge-security.com) * +********************************************************\r\n''' % (version, version) + +brief_usage 
='''Usage: %s [options] -z payload,params \r\n +Type wfuzz.py -h for further information. +''' % (sys.argv[0]) +usage ='''Usage: %s [options] -z payload,params \r\n +Options: +-h/--help : This help +--version : Wfuzz version details +-e : List of available encoders/payloads/iterators/printers/scripts + +-c : Output with colors +-v : Verbose information. Alias for -o verbose +-o printer : Format output using the specified printer (default printer if omitted). +--interact : (beta) If selected,all key presses are captured. This allows you to interact with the program. + +-p addr : Use Proxy in format ip:port:type or ip:port:type-...-ip:port:type for using various proxies. + Where type could be SOCKS4,SOCKS5 or HTTP if omitted. + +-t N : Specify the number of concurrent connections (10 default) +-s N : Specify time delay between requests (0 default) +-R depth : Recursive path discovery being depth the maximum recursion level. +-I : Use HTTP HEAD method (No HTML body responses). +--follow : Follow HTTP redirections +-Z : Scan mode (Connection errors will be ignored). + +-A : Alias for --script=default -v -c +--script= : Equivalent to --script=default +--script= : Runs script's scan. is a comma separated list of plugin-files or plugin-categories +--script-help= : Show help about scripts. +--script-args n1=v1,... : Provide arguments to scripts. ie. --script-args grep.regex=\"\" + +-m iterator : Specify an iterator for combining payloads (product by default) +-z payload : Specify a payload for each FUZZ keyword used in the form of type,parameters,encoder. + A list of encoders can be used, ie. md5-sha1. Encoders can be chained, ie. md5@sha1. + Encoders category can be used. ie. url +-w wordlist : Specify a wordlist file (alias for -z file,wordlist). +-V alltype : All parameters bruteforcing (allvars and allpost). No need for FUZZ keyword. +-X : Payload within HTTP methods (ex: "FUZZ HTTP/1.0"). No need for FUZZ keyword. + +-b cookie : Specify a cookie for the requests +-d postdata : Use post data (ex: "id=FUZZ&catalogue=1") +-H headers : Use headers (ex:"Host:www.mysite.com,Cookie:id=1312321&user=FUZZ") +--basic/ntlm/digest auth : in format "user:pass" or "FUZZ:FUZZ" or "domain\FUZ2Z:FUZZ" + +--hc/hl/hw/hh N[,N]+ : Hide responses with the specified code/lines/words/chars (Use BBB for taking values from baseline) +--sc/sl/sw/sh N[,N]+ : Show responses with the specified code/lines/words/chars (Use BBB for taking values from baseline) +--ss/hs regex : Show/Hide responses with the specified regex within the content +--filter : Filter responses using the specified expression (Use BBB for taking values from baseline) + It should be composed of: c,l,w,h/and,or/=,<,>,!=,<=,>= + +Keyword: FUZZ, ..., FUZnZ wherever you put these keywords wfuzz will replace them with the values of the specified payload. +Baseline: FUZZ{baseline_value} FUZZ will be replaced by baseline_value. It will be the first request performed and could be used as a base for filtering. 
+ +Examples: - wfuzz.py -c -z file,users.txt -z file,pass.txt --sc 200 http://www.site.com/log.asp?user=FUZZ&pass=FUZ2Z + - wfuzz.py -c -z range,1-10 --hc=BBB http://www.site.com/FUZZ{something not there} + - wfuzz.py --script=robots -z list,robots.txt http://www.webscantest.com/FUZZ + + More examples in the README.''' % (sys.argv[0]) + +class term_colors: + reset = "\x1b[0m" + bright = "\x1b[1m" + dim = "\x1b[2m" + underscore = "\x1b[4m" + blink = "\x1b[5m" + reverse = "\x1b[7m" + hidden = "\x1b[8m" + + fgBlack = "\x1b[30m" + fgRed = "\x1b[31m" + fgGreen = "\x1b[32m" + fgYellow = "\x1b[33m" + fgBlue = "\x1b[34m" + fgMagenta = "\x1b[35m" + fgCyan = "\x1b[36m" + fgWhite = "\x1b[37m" + + bgBlack = "\x1b[40m" + bgRed = "\x1b[41m" + bgGreen = "\x1b[42m" + bgYellow = "\x1b[43m" + bgBlue = "\x1b[44m" + bgMagenta = "\x1b[45m" + bgCyan = "\x1b[46m" + bgWhite = "\x1b[47m" diff --git a/framework/ui/console/controller.py b/framework/ui/console/controller.py new file mode 100644 index 00000000..e19f531a --- /dev/null +++ b/framework/ui/console/controller.py @@ -0,0 +1,86 @@ +from framework.utils.myqueue import memory_usage_resource + +usage='''\r\n +Interactive keyboard commands:\r\n +?: Show this help + +p: Pause +s: Show stats +q: Cancel +''' + +class Controller: + def __init__(self, fuzzer, view): + self._debug = False + self.fuzzer = fuzzer + self.view = view + self.__paused = False + + self.view.dispatcher.subscribe(self.on_help, "?") + self.view.dispatcher.subscribe(self.on_pause, "p") + self.view.dispatcher.subscribe(self.on_stats, "s") + self.view.dispatcher.subscribe(self.on_exit, "q") + + # dynamic keyboard bindings + def on_exit(self, **event): + self.fuzzer.genReq.stats.mark_end() + self.fuzzer.cancel_job() + #self.view.cancel_job() + #self.view.cancel_job() + + def on_help(self, **event): + print usage + + def on_pause(self, **event): + self.__paused = not self.__paused + if self.__paused: + self.fuzzer.pause_job() + + if self._debug: + print "\n=============== Paused ==================" + stats = self.fuzzer.stats() + for k,v in stats.items(): + print "%s: %s" % (k, v) + print "\n=========================================" + else: + self.fuzzer.resume_job() + + def on_stats(self, **event): + if self._debug: + fzstats = self.fuzzer.stats() + mem = memory_usage_resource() + + print "\nTotal items %d, Backfed items %d, HTTP reqs: %d, Fuzzed items: %d, Pending: %d (Wait HTTP: %d, Wait pre HTTP: %d, Wait Workers: %d, Wait processed: %d). 
(MEM: %d)" % \ + (fzstats['total'], \ + fzstats['backfed'], \ + fzstats['http_Processed'], \ + fzstats['Processed'], \ + fzstats['Pending'], \ + fzstats['http_Pending'], \ + fzstats['http_queue'], \ + fzstats['plugins_queue'], \ + fzstats['results_queue'], + mem + ) + else: + pending = self.fuzzer.genReq.stats.total_req - self.fuzzer.genReq.stats.processed + summary = self.fuzzer.genReq.stats + summary.mark_end() + print "\nTotal requests: %s\r" % str(summary.total_req) + print "Pending requests: %s\r" % str(pending) + + if summary.backfeed > 0: + print "Processed Requests: %s (%d + %d)\r" % (str(summary.processed)[:8], (summary.processed - summary.backfeed), summary.backfeed) + else: + print "Processed Requests: %s\r" % (str(summary.processed)[:8]) + print "Filtered Requests: %s\r" % (str(summary.filtered)[:8]) + req_sec = summary.processed/summary.totaltime if summary.totaltime > 0 else 0 + print "Total time: %s\r" % str(summary.totaltime)[:8] + if req_sec > 0: + print "Requests/sec.: %s\r" % str(req_sec)[:8] + eta = pending/req_sec + if eta > 60: + print "ET left min.: %s\r\n" % str(eta/60)[:8] + else: + print "ET left sec.: %s\r\n" % str(eta)[:8] + diff --git a/framework/ui/console/getch.py b/framework/ui/console/getch.py new file mode 100644 index 00000000..d193fcad --- /dev/null +++ b/framework/ui/console/getch.py @@ -0,0 +1,81 @@ +# http://code.activestate.com/recipes/134892/ +# +# getch()-like unbuffered character reading from stdin on both Windows and Unix (Python recipe) +# A small utility class to read single characters from standard input, on both Windows and UNIX systems. It provides a getch() function-like instance. +# +# Created by Danny Yoo on Fri, 21 Jun 2002 (PSF) +# (http://code.activestate.com/recipes/users/98032/) + +class _Getch: + """Gets a single character from standard input. Does not echo to the screen.""" + def __init__(self): + try: + self.impl = _GetchWindows() + except ImportError: + try: + self.impl = _GetchUnix() + except ImportError: + self.impl = _GetchMacCarbon() + + def __call__(self): return self.impl() + +class _GetchUnix: + def __init__(self): + import tty, sys + + def __call__(self): + import sys, tty, termios + fd = sys.stdin.fileno() + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(sys.stdin.fileno()) + ch = sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + return ch + + +class _GetchWindows: + def __init__(self): + import msvcrt + + def __call__(self): + import msvcrt + +class _GetchMacCarbon: + """ + A function which returns the current ASCII key that is down; + if no ASCII key is down, the null string is returned. The + page http://www.mactech.com/macintosh-c/chap02-1.html was + very helpful in figuring out how to do this. 
+ """ + def __init__(self): + import Carbon + Carbon.Evt #see if it has this (in Unix, it doesn't) + + def __call__(self): + import Carbon + if Carbon.Evt.EventAvail(0x0008)[0]==0: # 0x0008 is the keyDownMask + return '' + else: + # + # The event contains the following info: + # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1] + # + # The message (msg) contains the ASCII char which is + # extracted with the 0x000000FF charCodeMask; this + # number is converted to an ASCII character with chr() and + # returned + # + (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1] + return chr(msg & 0x000000FF) + +if __name__ == '__main__': # a little test + print 'Press a key' + inkey = _Getch() + + import sys + for i in xrange(sys.maxint): + k=inkey() + if k<>'':break + print 'you pressed ',k diff --git a/framework/ui/console/keystroke.py b/framework/ui/console/keystroke.py new file mode 100644 index 00000000..d1a89af2 --- /dev/null +++ b/framework/ui/console/keystroke.py @@ -0,0 +1,36 @@ +from framework.utils.dispatcher import SimpleEventDispatcher +from framework.ui.console.getch import _Getch +import threading + + +class KeyPress(threading.Thread): + def __init__(self): + threading.Thread.__init__(self) + self.inkey = _Getch() + self.setName("KeyPress") + + self.dispatcher = SimpleEventDispatcher() + self.dispatcher.create_event("?") + self.dispatcher.create_event("p") + self.dispatcher.create_event("s") + self.dispatcher.create_event("q") + + self.do_job = True + + def cancel_job(self): + self.do_job = False + + def run(self): + while self.do_job: + k = self.inkey() + if ord(k) == 3: + self.dispatcher.notify("q", key="q") + elif k == 'p': + self.dispatcher.notify("p", key="p") + elif k == 's': + self.dispatcher.notify("s", key="s") + elif k == '?': + self.dispatcher.notify("?", key="?") + elif k == 'q': + self.dispatcher.notify("q", key="q") + #raise KeyboardInterrupt diff --git a/framework/ui/console/output.py b/framework/ui/console/output.py new file mode 100644 index 00000000..4ae6e28d --- /dev/null +++ b/framework/ui/console/output.py @@ -0,0 +1,141 @@ +import math +import cStringIO,operator + + +def indent(rows, hasHeader=False, headerChar='-', delim=' | ', justify='left', + separateRows=False, prefix='', postfix='', wrapfunc=lambda x:x): + """ + @author http://code.activestate.com/recipes/267662-table-indentation/ + + + Indents a table by column. + - rows: A sequence of sequences of items, one sequence per row. + - hasHeader: True if the first row consists of the columns' names. + - headerChar: Character to be used for the row separator line + (if hasHeader==True or separateRows==True). + - delim: The column delimiter. + - justify: Determines how are data justified in their column. + Valid values are 'left','right' and 'center'. + - separateRows: True if rows are to be separated by a line + of 'headerChar's. + - prefix: A string prepended to each printed row. + - postfix: A string appended to each printed row. 
+ - wrapfunc: A function f(text) for wrapping text; each element in + the table is first wrapped by this function.""" + # closure for breaking logical rows to physical, using wrapfunc + def rowWrapper(row): + newRows = [wrapfunc(item).split('\n') for item in row] + return [[substr or '' for substr in item] for item in map(None,*newRows)] + # break each logical row into one or more physical ones + logicalRows = [rowWrapper(row) for row in rows] + # columns of physical rows + columns = map(None,*reduce(operator.add,logicalRows)) + # get the maximum of each column by the string length of its items + maxWidths = [max([len(str(item)) for item in column]) for column in columns] + rowSeparator = headerChar * (len(prefix) + len(postfix) + sum(maxWidths) + \ + len(delim)*(len(maxWidths)-1)) + # select the appropriate justify method + justify = {'center':str.center, 'right':str.rjust, 'left':str.ljust}[justify.lower()] + output=cStringIO.StringIO() + if separateRows: print >> output, rowSeparator + for physicalRows in logicalRows: + for row in physicalRows: + print >> output, \ + prefix \ + + delim.join([justify(str(item),width) for (item,width) in zip(row,maxWidths)]) \ + + postfix + if separateRows or hasHeader: print >> output, rowSeparator; hasHeader=False + return output.getvalue() + +def wrap_always(text, width): + """A simple word-wrap function that wraps text on exactly width characters. + It doesn't split the text in words.""" + return '\n'.join([ text[width*i:width*(i+1)] \ + for i in xrange(int(math.ceil(1.*len(text)/width))) ]) + +def table_print(rows, width = 80): + print indent(rows, hasHeader=True, separateRows=False, + prefix=' ', postfix=' ', + wrapfunc=lambda x: wrap_always(x,width)) + +def getTerminalSize(): + #http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python + import platform + current_os = platform.system() + tuple_xy=None + if current_os == 'Windows': + tuple_xy = _getTerminalSize_windows() + if tuple_xy is None: + tuple_xy = _getTerminalSize_tput() + # needed for window's python in cygwin's xterm! 
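+    # Descriptive note: the resolution order implemented here is the Windows console API,
+    # then "tput"; on Linux/OSX/Cygwin the termios ioctl on stdin/stdout/stderr, then the
+    # controlling tty, then the LINES/COLUMNS environment variables; if everything fails
+    # the size defaults to 80x25 below.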
+ if current_os == 'Linux' or current_os == 'Darwin' or current_os.startswith('CYGWIN'): + tuple_xy = _getTerminalSize_linux() + if tuple_xy is None: + print "default" + tuple_xy = (80, 25) # default value + + return tuple_xy + +def _getTerminalSize_windows(): + res=None + try: + from ctypes import windll, create_string_buffer + + # stdin handle is -10 + # stdout handle is -11 + # stderr handle is -12 + + h = windll.kernel32.GetStdHandle(-12) + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) + except: + return None + if res: + import struct + (bufx, bufy, curx, cury, wattr, + left, top, right, bottom, maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw) + sizex = right - left + 1 + sizey = bottom - top + 1 + return sizex, sizey + else: + return None + +def _getTerminalSize_tput(): + # get terminal width + # src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window + try: + import subprocess + proc=subprocess.Popen(["tput", "cols"],stdin=subprocess.PIPE,stdout=subprocess.PIPE) + output=proc.communicate(input=None) + cols=int(output[0]) + proc=subprocess.Popen(["tput", "lines"],stdin=subprocess.PIPE,stdout=subprocess.PIPE) + output=proc.communicate(input=None) + rows=int(output[0]) + return (cols,rows) + except: + return None + + +def _getTerminalSize_linux(): + def ioctl_GWINSZ(fd): + try: + import fcntl, termios, struct, os + cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,'1234')) + except: + return None + return cr + cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except: + pass + if not cr: + try: + cr = (env['LINES'], env['COLUMNS']) + except: + return None + return int(cr[1]), int(cr[0]) + diff --git a/framework/utils/__init__.py b/framework/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/framework/utils/dispatcher.py b/framework/utils/dispatcher.py new file mode 100644 index 00000000..83176d33 --- /dev/null +++ b/framework/utils/dispatcher.py @@ -0,0 +1,23 @@ +from collections import defaultdict + +class SimpleEventDispatcher: + def __init__(self): + self.publisher = defaultdict(list) + + def create_event(self, msg): + self.publisher[msg] = [] + + def subscribe(self, func, msg, dynamic = False): + if not self.publisher.has_key(msg) and not dynamic: + raise KeyError, 'subscribe. No such event: %s' % (msg) + else: + self.publisher[msg].append(func) + + def notify(self, msg, **event): + if not self.publisher.has_key(msg): + raise KeyError, 'notify. 
Event not subscribed: %s' % (msg,) + else: + for functor in self.publisher[msg]: + functor(**event) + + diff --git a/framework/utils/myqueue.py b/framework/utils/myqueue.py new file mode 100644 index 00000000..466200dd --- /dev/null +++ b/framework/utils/myqueue.py @@ -0,0 +1,155 @@ +import resource +import sys +import traceback + +from Queue import PriorityQueue +from threading import Thread +from framework.core.myexception import FuzzException + + +class MyPriorityQueue(PriorityQueue): + def __init__(self, limit = 0): + PriorityQueue.__init__(self, limit) + + self.max_prio = 0 + self.none_send = False + + def put_priority(self, prio, item): + if self.should_sent(item): + self.max_prio = max(prio, self.max_prio) + PriorityQueue.put(self, (prio, item)) + + def put(self, item): + if self.should_sent(item): + self.max_prio = max(item.rlevel, self.max_prio) + PriorityQueue.put(self, (item.rlevel, item)) + + def put_first(self, item): + if self.should_sent(item): + PriorityQueue.put(self, (0, item)) + + def put_last(self, item): + if self.should_sent(item): + self.max_prio += 1 + PriorityQueue.put(self, (self.max_prio, item)) + + def should_sent(self, item): + ''' + Little hack to avoid sending more than one None as it breaks sync (due to roundrobin queue) + ''' + if (item is None and not self.none_send) \ + or (item is not None): + self.none_send = True if item is None else False + return True + + return False + + +class FuzzQueue(MyPriorityQueue, Thread): + def __init__(self, queue_out, limit = 0, wait_qout = True): + Thread.__init__(self) + MyPriorityQueue.__init__(self, limit) + + self.queue_out = queue_out + self.wait_qout = wait_qout + + self.setName(self.get_name()) + + self.start() + + def process(self, prio, item): + raise NotImplemented + + def get_name(self): + raise NotImplemented + + def send_first(self, item): + self.queue_out.put_first(item) + + def send_last(self, item): + self.queue_out.put_last(item) + + def qout_join(self): + self.queue_out.join() + + def send(self, item): + self.queue_out.put(item) + + def join(self): + MyPriorityQueue.join(self) + + def tjoin(self): + Thread.join(self) + + def _cleanup(self): + raise NotImplemented + + def _throw(self, e): + if isinstance(e, FuzzException): + self.send_first(e) + else: + msg = "%s\n\n%s" %(str(e), traceback.format_exc()) + self.send_first(FuzzException(FuzzException.FATAL, msg)) + + def run(self): + cancelling = False + + while 1: + prio, item = self.get(True, 365 * 24 * 60 * 60) + + try: + if item == None and not cancelling: + if self.wait_qout: + self.send_last(None) + self.qout_join() + self.task_done() + break + elif item == None and cancelling: + self.send_last(None) + self.task_done() + break + elif cancelling: + self.task_done() + continue + elif isinstance(item, Exception): + cancelling = True if item.etype == FuzzException.SIGCANCEL else False + self.send_first(item) + self.task_done() + continue + + self.process(prio, item) + self.task_done() + except Exception, e: + self.task_done() + self._throw(e) + + self._cleanup() + +class FuzzListQueue(FuzzQueue): + def __init__(self, queue_out, limit = 0, wait_qout = True): + FuzzQueue.__init__(self, queue_out, limit, wait_qout) + + def send_first(self, item): + for q in self.queue_out: + q.put_first(item) + + def send_last(self, item): + for q in self.queue_out: + q.put_last(item) + + def send(self, item): + for q in self.queue_out: + q.put(item) + + def qout_join(self): + for q in self.queue_out: + q.join() + +def memory_usage_resource(): + # from 
http://fa.bianp.net/blog/2013/different-ways-to-get-memory-consumption-or-lessons-learned-from-memory_profiler/ + rusage_denom = 1024. + if sys.platform == 'darwin': + # ... it seems that in OSX the output is different units ... + rusage_denom = rusage_denom * rusage_denom + mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom + return mem diff --git a/iterations.py b/iterations.py deleted file mode 100644 index c81261ad..00000000 --- a/iterations.py +++ /dev/null @@ -1,37 +0,0 @@ -import itertools - -class iterator_zip(itertools.izip): - text="zip" - - def __init__(self, *i): - itertools.izip.__init__(self, *i) - self.__count = max(map(lambda x:x.count(), i)) - - def count(self): - return self.__count - -class iterator_product(itertools.product): - text="product" - - def __init__(self, *i): - itertools.product.__init__(self, *i) - self.__count = reduce(lambda x,y:x*y.count(), i[1:], i[0].count()) - - def count(self): - return self.__count - -class iterator_chain: - text="chain" - - def count(self): - return self.__count - - def __init__(self, *i): - self.__count = sum(map(lambda x:x.count(), i)) - self.it = itertools.chain(*i) - - def next(self): - return (self.it.next(),) - - def __iter__(self): - return self diff --git a/patterns/__init__.py b/patterns/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/patterns/singleton.py b/patterns/singleton.py new file mode 100644 index 00000000..7b5174ca --- /dev/null +++ b/patterns/singleton.py @@ -0,0 +1,33 @@ +import sys +import traceback + +class Singleton(type): + ''' Singleton metaclass. Use by defining the metaclass of a class Singleton, + e.g.: class ThereCanBeOnlyOne: + __metaclass__ = Singleton + ''' + + def __call__(class_, *args, **kwargs): + #try: + if not class_.hasInstance(): + class_.instance = super(Singleton, class_).__call__(*args, **kwargs) + return class_.instance + #except Exception, e: + # error_type, error_value, trbk = sys.exc_info() + # tb_list = traceback.format_tb(trbk, 6) + # s = "Error: %s \nDescription: %s \nTraceback:" % (error_type.__name__, error_value) + # for i in tb_list: + # s += "\n" + i + + # print s + # return None + + def deleteInstance(class_): + ''' Delete the (only) instance. This method is mainly for unittests so + they can start with a clean slate. ''' + if class_.hasInstance(): + del class_.instance + + def hasInstance(class_): + ''' Has the (only) instance been created already? 
''' + return hasattr(class_, 'instance') diff --git a/payloads.py b/payloads.py deleted file mode 100644 index e5636e29..00000000 --- a/payloads.py +++ /dev/null @@ -1,288 +0,0 @@ -import encoders -import copy -import random - - -####### SUPERCLASS - -class payload: - def __init__(self): - self.__count=0 - pass - - def __iter__ (self): - return base_iterator() - - - def count(self): - return self.__count - -class base_iterator: - def __init__(self): - pass - - def next (self): - raise StopIteration - - -###################################### -###################################### -######## Inheritances -###################################### -###################################### - -class payload_file (payload): - text = "file" - def __init__(self,file): - payload.__init__(self) - self.file=file - f=open(file,"r") - self.__count=len(f.readlines()) - f.close() - - - def count(self): - return self.__count - - def __iter__ (self): - return file_iterator(self.file) - -class file_iterator (base_iterator): - def __init__(self,file): - base_iterator.__init__(self) - self.file=open (file,"r") - - def next (self): - line=self.file.next().strip() - - return line - -################### RANGE PAYLOAD - - -class payload_range (payload): - text = "range" - def __init__(self,range): ## range example --> "23-56" - payload.__init__(self) - try: - ran=range.split("-") - self.minimum=int(ran[0]) - self.maximum=int(ran[1]) - self.__count=self.maximum - self.minimum + 1 - self.width=len(ran[0]) - except: - raise Exception, "Bad range format (eg. \"23-56\")" - - - def count(self): - return self.__count - - def __iter__ (self): - return range_iterator(self.minimum,self.maximum,self.width) - - -class range_iterator (base_iterator): - def __init__(self,min,max,width): - base_iterator.__init__(self) - self.minimum=min - self.maximum=max - self.width=width - self.current=self.minimum - - def next (self): - if self.current>self.maximum: - raise StopIteration - if self.width: - payl="%0"+str(self.width)+"d" - payl=payl % (self.current) - else: - payl=str(self.current) - - self.current+=1 - return payl - - -################### HEXRANGE PAYLOAD - - -class payload_hexrange(payload): - text="hexrange" - def __init__(self,range): ## range example --> "0-ffa" - payload.__init__(self) - try: - ran=range.split("-") - self.minimum=int(ran[0],16) - self.maximum=int(ran[1],16) - self.__count=self.maximum - self.minimum + 1 - except: - raise Exception, "Bad range format (eg. \"0-ffa\")" - - def __iter__ (self): - return hexrange_iterator(self.minimum,self.maximum) - - def count(self): - return self.__count - -class hexrange_iterator (base_iterator): - def __init__(self,min,max): - base_iterator.__init__(self) - self.minimum=min - self.maximum=max - self.current=self.minimum - - def next (self): - if self.current>self.maximum: - raise StopIteration - - lgth=len(hex(self.maximum).replace("0x","")) - pl="%"+str(lgth)+"s" - num=hex(self.current).replace("0x","") - pl= pl % (num) - payl=pl.replace(" ","0") - - self.current+=1 - - return payl - -class payload_hexrand(payload): - text="hexrand" - def __init__(self,range): ## range example --> "0-ffa" - payload.__init__(self) - try: - ran=range.split("-") - self.minimum=int(ran[0],16) - self.maximum=int(ran[1],16) - self.__count=self.maximum - self.minimum + 1 - except: - raise Exception, "Bad range format (eg. 
\"0-ffa\")" - - def __iter__ (self): - return hexrand_iterator(self.minimum,self.maximum) - - def count(self): - return self.__count - - - -class hexrand_iterator (base_iterator): - def __init__(self,min,max): - base_iterator.__init__(self) - self.minimum=min - self.maximum=max - self.current=self.minimum - - def next (self): - self.current = random.SystemRandom().randint(self.minimum,self.maximum) - - lgth=len(hex(self.maximum).replace("0x","")) - pl="%"+str(lgth)+"s" - num=hex(self.current).replace("0x","") - pl= pl % (num) - payl=pl.replace(" ","0") - - return payl - -######################### PAYLOAD LIST - - -class payload_list (payload): - text="list" - separator="-" - def __init__(self,l): - payload.__init__(self) - self.l=l.split(self.separator) - self.__count=len(self.l) - - def __iter__ (self): - return plist_iterator(self.l) - - def count(self): - return self.__count - - -class plist_iterator (list): - def __init__(self,list): - self.list=list - self.current=0 - - def next (self): - try: - elem=self.list[self.current] - self.current+=1 - return elem - except: - raise StopIteration - -class payload_names(payload): - text="names" - def __init__(self,startnames): - self.startnames=startnames - payload.__init__(self) - from sets import Set - possibleusernames=[] - name="" - list=self.startnames.split("-") - for x in list: - if name=="": - name=name+x - else: - name=name+" "+x - if " " in name: - parts=name.split() - possibleusernames.append(parts[0]) - possibleusernames.append(parts[0]+"."+parts[1]) - possibleusernames.append(parts[0]+parts[1]) - possibleusernames.append(parts[0]+"."+parts[1][0]) - possibleusernames.append(parts[0][0]+"."+parts[1]) - possibleusernames.append(parts[0]+parts[1][0]) - possibleusernames.append(parts[0][0]+parts[1]) - str1="" - str2="" - str3="" - str4="" - for i in range(0,len(parts)-1): - str1=str1+parts[i]+"." - str2=str2+parts[i] - str3=str3+parts[i][0]+"." 
- str4=str4+parts[i][0] - str5=str1+parts[-1] - str6=str2+parts[-1] - str7=str4+parts[-1] - str8=str3+parts[-1] - str9=str2+parts[-1][0] - str10=str4+parts[-1][0] - possibleusernames.append(str5) - possibleusernames.append(str6) - possibleusernames.append(str7) - possibleusernames.append(str8) - possibleusernames.append(str9) - possibleusernames.append(str10) - possibleusernames.append(parts[-1]) - possibleusernames.append(parts[0]+"."+parts[-1]) - possibleusernames.append(parts[0]+parts[-1]) - possibleusernames.append(parts[0]+"."+parts[-1][0]) - possibleusernames.append(parts[0][0]+"."+parts[-1]) - possibleusernames.append(parts[0]+parts[-1][0]) - possibleusernames.append(parts[0][0]+parts[-1]) - self.creatednames=possibleusernames - else: - possibleusernames.append(name) - self.creatednames=possibleusernames - self.__count=len(possibleusernames) - - def count(self): - return self.__count - - def __iter__(self): - return name_iterator(self.creatednames) - -class name_iterator(list): - def __init__(self,list): - self.list=list - - def next(self): - if self.list != []: - payl=self.list.pop() - return payl - else: - raise StopIteration diff --git a/plugins/__init__.py b/plugins/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/plugins/encoders.py b/plugins/encoders.py new file mode 100644 index 00000000..5558f670 --- /dev/null +++ b/plugins/encoders.py @@ -0,0 +1,406 @@ +from externals.moduleman.plugin import moduleman_plugin + +import urllib +import base64 as mybase64 +import re +import binascii +import random +import hashlib +import cgi + +class pencoder_multiple: + text="internal multiple encoder" + + def __init__(self, l): + self.l = l + + def encode(self, string): + for i in reversed(self.l): + string = i.encode(string) + return string + + def decode(self, string): + for i in reversed(self.l): + string = i.decode(string) + return string + +@moduleman_plugin("encode") +class none: + name = "none" + description = "Returns string without changes" + category = ["default"] + priority = 99 + + def encode(self, string): + return string + + def decode(self, string): + return string + +@moduleman_plugin("encode") +class urlencode : + name = "urlencode" + description = "Replace special characters in string using the %xx escape. Letters, digits, and the characters '_.-' are never quoted." + category = ["url_safe", "url"] + priority = 99 + + def encode(self, string): + return urllib.quote(string) + + def decode(self, string): + return urllib.unquote(string) + +@moduleman_plugin("encode") +class double_urlencode: + name = "double urlencode" + description = "Applies a double encode to special characters in string using the %25xx escape. Letters, digits, and the characters '_.-' are never quoted." + category = ["url_safe", "url"] + priority = 99 + + def encode(self, string): + return urllib.quote(urllib.quote(string)) + + def decode(self, string): + return urllib.unquote(urllib.unquote(string)) + +@moduleman_plugin("encode") +class base64: + name = "base64" + description = "Encodes the given string using base64" + category = ["hashes"] + priority = 99 + + def encode(self, string): + return mybase64.standard_b64encode(string) + + def decode(self, string): + return mybase64.decodestring(string) + +@moduleman_plugin("encode") +class uri_triple_hex: + name = "uri_triple_hex" + description = "Encodes ALL charachers using the %25%xx%xx escape." 
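+    # Illustrative example (not part of the original patch): encode("a") returns
+    # "%25%36%31" -- the hex value 0x61 is split into its ASCII digits '6' and '1',
+    # each of which is then %-escaped after a leading "%25" (an encoded '%').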
+ category = ["url"] + priority = 99 + + def encode(self, string): + strt = "" + s = re.compile(r"/|;|=|:|&|@|\\|\?") + for c in string: + if s.search(c): + strt += c + continue + temp = hex(ord(c))[2:] + strt += "%%25%%%02x%%%02x" % (ord(temp[:1]), ord(temp[1:])) + return strt + +@moduleman_plugin("encode") +class uri_double_hex: + name = "uri_double_hex" + description = "Encodes ALL charachers using the %25xx escape." + category = ["url"] + priority = 99 + + def encode(self,string): + strt = "" + con = "%%25%02x" + s = re.compile(r"/|;|=|:|&|@|\\|\?") + for c in string: + if s.search(c): + strt += c + continue + strt += con % ord(c) + return strt + +@moduleman_plugin("encode") +class uri_hex: + name = "uri_hex" + description = "Encodes ALL charachers using the %xx escape." + category = ["url"] + priority = 99 + + def encode(self,string): + strt = "" + con = "%%%02x" + s = re.compile(r"/|;|=|:|&|@|\\|\?") + for c in string: + if s.search(c): + strt += c + continue + strt += con % ord(c) + return strt + + +@moduleman_plugin("encode") +class random_upper: + name = "random_upper" + description = "Replaces random characters in string with its capitals letters" + category = ["default"] + priority = 99 + + def encode(self,string): + strt = "" + for c in string: + x = int(random.uniform(0,10)) + x = x % 2 + if x == 1: + strt += c.upper() + else: + strt += c + return strt + + +@moduleman_plugin("encode") +class second_nibble_hex: + name = "second_nibble_hex" + description = "Replaces ALL characters in string using the %?%dd escape" + category = ["url"] + priority = 99 + + def encode(self, string): + strt = "" + con = "%%%02x" + s = re.compile(r"/|;|=|:|&|@|\\|\?") + for c in string: + if s.search(c): + strt += c + continue + temp = hex(ord(c))[2:] + strt += "%%%s%%%02x" % (str(temp[:1]), ord(temp[1:])) + return strt + +@moduleman_plugin("encode") +class first_nibble_hex: + name = "first_nibble_hex" + description = "Replaces ALL characters in string using the %%dd? escape" + category = ["url"] + priority = 99 + + def encode(self, string): + strt = "" + con = "%%%02x" + s=re.compile(r"/|;|=|:|&|@|\\|\?") + for c in string: + if s.search(c): + strt += c + continue + temp = hex(ord(c))[2:] + strt += "%%%%%02x%s" % (ord(temp[:1]), str(temp[1:])) + return strt + +@moduleman_plugin("encode") +class doble_nibble_hex: + name = "doble_nibble_hex" + description = "Replaces ALL characters in string using the %%dd%dd escape" + category = ["url"] + priority = 99 + + def encode(self,string): + strt = "" + fin = "" + con = "%%%02x" + s=re.compile(r"/|;|=|:|&|@|\\|\?") + enc=uri_hex() + strt = enc.encode(string) + for c in strt: + if not c == "%": + if s.search(c): + fin += c + continue + fin += con % ord(c) + else: + fin += c + return fin + +@moduleman_plugin("encode") +class sha1: + name = "sha1" + description = "Applies a sha1 hash to the given string" + category = ["hashes"] + priority = 99 + + def encode(self, string): + s = hashlib.sha1() + s.update(string) + res = s.hexdigest() + return res + +@moduleman_plugin("encode") +class md5: + name = "md5" + description = "Applies a md5 hash to the given string" + category = ["hashes"] + priority = 99 + + def encode(self,string): + m=hashlib.new('md5') + m.update(string) + res = m.hexdigest() + return res + +@moduleman_plugin("encode") +class hexlify: + name = "hexlify" + description = "Every byte of data is converted into the corresponding 2-digit hex representation." 
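+    # Illustrative example (not part of the original patch): encode("abc") returns
+    # "616263"; decode() is intended to reverse this via binascii.unhexlify.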
+ category = ["default"] + priority = 99 + + def encode(self, string): + return binascii.hexlify(string) + + def decode(self, string): + return binascii.unhexlify(clear) + +@moduleman_plugin("encode") +class html_escape: + name = "html_escape" + description = "Convert the characters '&', '<' and '>' in string to HTML-safe sequences." + category = ["html"] + priority = 99 + + def encode(self, string): + return cgi.escape(string) + +@moduleman_plugin("encode") +class html_decimal: + name = "html_decimal" + description = "Replaces ALL characters in string using the &#dd; escape" + category = ["html"] + priority = 99 + + def encode(self,string): + new = "" + for x in string: + new += "&#"+str(ord(x))+";" + return new + +@moduleman_plugin("encode") +class html_hexadecimal: + name = "html_hexadecimal" + description = "Replaces ALL characters in string using the &#xx; escape" + category = ["html"] + priority = 99 + + def encode(self,string): + new = "" + for x in string: + val = "%02x" % ord(x) + new += "&#x"+str(val)+";" + return new + +@moduleman_plugin("encode") +class utf8_binary: + name = "utf8_binary" + description = "Replaces ALL characters in string using the \\uxx escape" + category = ["url"] + priority = 99 + + def encode(self,string): + new = "" + for x in string: + val = "%02x" % ord(x) + new += "\\x"+str(val) + return new + +@moduleman_plugin("encode") +class utf8: + name = "utf8" + description = "Replaces ALL characters in string using the \\u00xx escape" + category = ["url"] + priority = 99 + + def encode(self,string): + new = "" + for x in string: + val = "%02x" % ord(x) + if len(val)==2: + new += "\\u00"+str(val) + else: + new += "\\u"+str(val) + return new + +@moduleman_plugin("encode") +class uri_unicode: + name = "uri_unicode" + description = "Replaces ALL characters in string using the %u00xx escape" + category = ["url"] + priority = 99 + + def encode(self, string): + new = "" + for x in string: + val = "%02x" % ord(x) + if len(val) == 2: + new += "%u00" + str(val) + else: + new += "%u"+str(val) + return new + +@moduleman_plugin("encode") +class mysql_char: + name = "mysql_char" + description = "Converts ALL characters to MySQL's char(xx)" + category = ["db"] + priority = 99 + + def encode(self,string): + new = "CHAR(" + for x in string: + val=str(ord(x)) + new+=str(val)+"," + new=new.strip(",") + new+=")" + return new + + def decode(self,string): + temp = string.strip("CHAR").strip("(").strip(")").split(",") + new = "" + for x in temp: + new += chr(int(x)) + return new + +@moduleman_plugin("encode") +class mssql_char: + name = "mssql_char" + description = "Converts ALL characters to MsSQL's char(xx)" + category = ["db"] + priority = 99 + + def encode(self, string): + new="" + for x in string: + val=str(ord(x)) + new+="CHAR("+str(val)+")+" + new=new.strip("+") + return new + + def decode(self, string): + new="" + temp=string.split("+") + for x in temp: + x=x.strip("CHAR").strip(")").strip("(") + new+= chr(int(x)) + return new + +@moduleman_plugin("encode") +class oracle_char: + name = "oracle_char" + description = "Converts ALL characters to Oracle's chr(xx)" + category = ["db"] + priority = 99 + + def encode(self,string): + new="" + for x in string: + val = str(ord(x)) + new += "chr("+val+")||" + new = new.strip("||") + return new + + def decode(self, string): + new = "" + temp = string.split("||") + for x in temp: + x = x.strip("chr").strip(")").strip("(") + new += chr(int(x)) + return new diff --git a/plugins/iterations.py b/plugins/iterations.py new file mode 100644 index 
00000000..1ebd1e5b --- /dev/null +++ b/plugins/iterations.py @@ -0,0 +1,107 @@ +from externals.moduleman.plugin import moduleman_plugin + +import itertools + +class piterator_void: + text="void" + + def count(self): + return self.__count + + def __init__(self, *i): + self._dic = i + self.__count = max(map(lambda x:x.count(), i)) + self.it = self._dic[0] + + def next(self): + return (self.it.next(),) + + def restart(self): + for dic in self._dic: + dic.restart() + self.it = self._dic[0] + + def __iter__(self): + self.restart() + return self + +@moduleman_plugin("restart", "count", "next", "__iter__") +class zip: + name = "zip" + description = "Returns an iterator that aggregates elements from each of the iterables." + category = ["default"] + priority = 99 + + def __init__(self, *i): + self._dic = i + self.it = itertools.izip(*self._dic) + self.__count = max(map(lambda x:x.count(), i)) + + def count(self): + return self.__count + + def restart(self): + for dic in self._dic: + dic.restart() + self.it = itertools.izip.__init__(self, *self._dic) + + def next(self): + return self.it.next() + + def __iter__(self): + self.restart() + return self + +@moduleman_plugin("restart", "count", "next", "__iter__") +class product: + name = "product" + description = "Returns an iterator cartesian product of input iterables." + category = ["default"] + priority = 99 + + def __init__(self, *i): + self._dic = i + self.it = itertools.product(*self._dic) + self.__count = reduce(lambda x,y:x*y.count(), i[1:], i[0].count()) + + def restart(self): + for dic in self._dic: + dic.restart() + self.it = itertools.product(*self._dic) + + def count(self): + return self.__count + + def next(self): + return self.it.next() + + def __iter__(self): + self.restart() + return self + +@moduleman_plugin("restart", "count", "next", "__iter__") +class chain: + name = "chain" + description = "Returns an iterator returns elements from the first iterable until it is exhausted, then proceeds to the next iterable, until all of the iterables are exhausted." + category = ["default"] + priority = 99 + + def count(self): + return self.__count + + def __init__(self, *i): + self.__count = sum(map(lambda x:x.count(), i)) + self._dic = i + self.it = itertools.chain(*i) + + def restart(self): + for dic in self._dic: + dic.restart() + self.it = itertools.chain(*self._dic) + + def next(self): + return (self.it.next(),) + + def __iter__(self): + self.restart() + return self diff --git a/plugins/payloads.py b/plugins/payloads.py new file mode 100644 index 00000000..c201d54e --- /dev/null +++ b/plugins/payloads.py @@ -0,0 +1,397 @@ +import random +import sys +import __builtin__ + +from externals.moduleman.plugin import moduleman_plugin +from framework.core.myexception import FuzzException +from framework.fuzzer.base import wfuzz_iterator +from framework.plugins.api import search_bing + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class file: + name = "file" + description = "Returns each word from a file." 
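+    # Typical invocation (illustrative; the wordlist path is hypothetical):
+    #   wfuzz.py -z file,wordlist/general/common.txt http://example.com/FUZZ
+    # or, via the shorthand documented in the help text: -w wordlist/general/common.txt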
+ category = ["default"] + priority = 99 + + def __init__(self, filename): + try: + self.f = open(filename,"r") + except IOError: + raise FuzzException(FuzzException.FATAL, "Error opening file") + + self.__count = len(self.f.readlines()) + self.f.seek(0) + + def next (self): + return self.f.next().strip() + + def count(self): + return self.__count + + def __iter__ (self): + return self + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class range: + name = "range" + description = "Returns each number of the given range. ie. 0-10" + category = ["default"] + priority = 99 + + def __init__(self, whatrange): ## range example --> "23-56" + try: + ran = whatrange.split("-") + self.minimum = int(ran[0]) + self.maximum = int(ran[1]) + self.__count = self.maximum - self.minimum + 1 + self.width = len(ran[0]) + self.current = self.minimum + except: + raise FuzzException(FuzzException.FATAL, "Bad range format (eg. \"23-56\")") + + def next(self): + if self.current>self.maximum: + raise StopIteration + else: + if self.width: + payl = "%0"+ str(self.width) + "d" + payl = payl % (self.current) + else: + payl = str(self.current) + + self.current += 1 + return payl + + def count(self): + return self.__count + + def __iter__(self): + return self + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class hexrange: + name = "hexrange" + description = "Returns each hex number of the given hex range. ie. 00-ff" + category = ["default"] + priority = 99 + + def __init__(self, prange): ## range example --> "0-ffa" + try: + ran = prange.split("-") + self.minimum = int(ran[0],16) + self.maximum = int(ran[1],16) + self.__count = self.maximum - self.minimum + 1 + self.current = self.minimum + except: + raise Exception, "Bad range format (eg. \"0-ffa\")" + + def __iter__(self): + return self + + def count(self): + return self.__count + + def next(self): + if self.current > self.maximum: + raise StopIteration + + lgth=len(hex(self.maximum).replace("0x","")) + pl="%"+str(lgth)+"s" + num=hex(self.current).replace("0x","") + pl= pl % (num) + payl=pl.replace(" ","0") + + self.current+=1 + + return payl + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class hexrand: + name = "hexrand" + description = "Returns random hex numbers." + category = ["default"] + priority = 99 + + def __init__(self, prange): ## range example --> "0-ffa" + try: + ran = prange.split("-") + self.minimum=int(ran[0],16) + self.maximum=int(ran[1],16) + self.__count=-1 + except: + raise Exception, "Bad range format (eg. \"0-ffa\")" + + def __iter__ (self): + return self + + def count(self): + return self.__count + + def next (self): + self.current = random.SystemRandom().randint(self.minimum,self.maximum) + + lgth = len(hex(self.maximum).replace("0x","")) + pl="%"+str(lgth)+"s" + num = hex(self.current).replace("0x","") + pl = pl % (num) + payl =pl.replace(" ","0") + + return payl + + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class buffer_overflow: + name = "buffer_overflow" + description = "Returns a string using the following pattern A * given number." 
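+    # Illustrative example: "-z buffer_overflow,1024" yields a single payload of
+    # 1024 "A" characters (self.l below holds exactly one element, 'A' * n).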
+ category = ["default"] + priority = 99 + + def __init__(self, n): + self.l = ['A' * int(n)] + self.current = 0 + + def __iter__(self): + return self + + def count(self): + return 1 + + def next (self): + if self.current == 0: + elem = self.l[self.current] + self.current+=1 + return elem + else: + raise StopIteration + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class list: + name = "list" + description = "Returns each element of the given word list separated by -. ie word1-word2" + category = ["default"] + priority = 99 + + def __init__(self, l): + if l.find("\\") >= 0: + l = l.replace("\\-", "$SEP$") + l = l.replace("\\\\", "$SCAP$") + + self.l = l.split("-") + + for i in __builtin__.range(len(self.l)): + self.l[i] = self.l[i].replace("$SEP$", "-") + self.l[i] = self.l[i].replace("$SCAP$", "\\") + else: + self.l = l.split("-") + + self.__count = len(self.l) + self.current = 0 + + def __iter__ (self): + return self + + def count(self): + return self.__count + + def next (self): + if self.current >= self.__count: + raise StopIteration + else: + elem = self.l[self.current] + self.current += 1 + return elem + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class stdin: + name = "stdin" + description = "Returns each item read from stdin." + category = ["default"] + priority = 99 + + def __init__(self, deprecated): + # stdin is unseekable + self.__count = -1 + #self.__count=len(sys.stdin.readlines()) + #sys.stdin.seek(0) + + def count(self): + return self.__count + + def __iter__ (self): + return self + + def next (self): + #line=sys.stdin.next().strip().split(':') + line = sys.stdin.next().strip() + + return line + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class names: + name = "names" + description = "Returns possible usernames by mixing the given words, separated by -, using known typical constructions. ie. jon-smith" + category = ["default"] + priority = 99 + + def __init__(self, startnames): + self.startnames = startnames + from sets import Set + possibleusernames = [] + name = "" + llist = self.startnames.split("-") + + for x in llist: + if name == "": + name = name + x + else: + name = name + " " + x + + if " " in name: + parts = name.split() + possibleusernames.append(parts[0]) + possibleusernames.append(parts[0]+"."+parts[1]) + possibleusernames.append(parts[0]+parts[1]) + possibleusernames.append(parts[0]+"."+parts[1][0]) + possibleusernames.append(parts[0][0]+"."+parts[1]) + possibleusernames.append(parts[0]+parts[1][0]) + possibleusernames.append(parts[0][0]+parts[1]) + str1="" + str2="" + str3="" + str4="" + for i in __builtin__.range(0,len(parts)-1): + str1=str1+parts[i]+"." + str2=str2+parts[i] + str3=str3+parts[i][0]+"." 
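+            # str1..str4 accumulate "first.", "first", "f." and "f" style prefixes over the
+            # leading name parts; str5..str10 below append the last part or its initial.
+            # Illustrative: the payload "john-smith" also yields john.smith, johnsmith,
+            # j.smith, jsmith, johns, js and smith, among others.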
+ str4=str4+parts[i][0] + str5=str1+parts[-1] + str6=str2+parts[-1] + str7=str4+parts[-1] + str8=str3+parts[-1] + str9=str2+parts[-1][0] + str10=str4+parts[-1][0] + possibleusernames.append(str5) + possibleusernames.append(str6) + possibleusernames.append(str7) + possibleusernames.append(str8) + possibleusernames.append(str9) + possibleusernames.append(str10) + possibleusernames.append(parts[-1]) + possibleusernames.append(parts[0]+"."+parts[-1]) + possibleusernames.append(parts[0]+parts[-1]) + possibleusernames.append(parts[0]+"."+parts[-1][0]) + possibleusernames.append(parts[0][0]+"."+parts[-1]) + possibleusernames.append(parts[0]+parts[-1][0]) + possibleusernames.append(parts[0][0]+parts[-1]) + else: + possibleusernames.append(name) + + self.creatednames=possibleusernames + self.__count=len(possibleusernames) + + def count(self): + return self.__count + + def __iter__(self): + return self + + def next(self): + if self.creatednames: + payl = self.creatednames.pop() + return payl + else: + raise StopIteration + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class permutation: + name = "permutation" + description = "Returns permutations of the given charset and length. ie. abc-2" + category = ["default"] + priority = 99 + + def __init__(self, prange): ## range example --> "abcdef-4" + self.charset = [] + + try: + ran = prange.split("-") + self.charset = ran[0] + self.width = int(ran[1]) + except: + raise Exception, "Bad range format (eg. \"abfdeg-3\")" + + pset = [] + for x in self.charset: + pset.append(x) + + words = self.xcombinations(pset, self.width) + self.lista = [] + for x in words: + self.lista.append(''.join(x)) + + self.__count = len(self.lista) + + def __iter__ (self): + return self + + def count(self): + return self.__count + + def next (self): + if self.lista != []: + payl=self.lista.pop() + return payl + else: + raise StopIteration + + def xcombinations(self, items, n): + if n == 0: + yield [] + else: + try: + for i in xrange(len(items)): + for cc in self.xcombinations(items[:i] + items[i:], n - 1): + yield [items[i]] + cc + except: + print "Interrupted Permutation calculations" + sys.exit() + +@wfuzz_iterator +@moduleman_plugin("count", "next", "__iter__") +class bing: + ''' + Some examples of bing hacking: + - http://www.elladodelmal.com/2010/02/un-poco-de-bing-hacking-i-de-iii.html + ''' + name = "bing" + description = "Returns URL results of a given bing API search (needs api key). 
ie, intitle:\"JBoss JMX Management Console\"-10" + category = ["default"] + priority = 99 + + def __init__(self, dork): + self.l = search_bing(dork) + self.__count = len(self.l) + self.current = 0 + + def __iter__ (self): + return self + + def count(self): + return self.__count + + def next (self): + if self.current >= self.__count: + raise StopIteration + else: + elem = self.l[self.current]['Url'] + self.current += 1 + return str(elem.strip()) diff --git a/plugins/printers.py b/plugins/printers.py new file mode 100644 index 00000000..4b9bd4c0 --- /dev/null +++ b/plugins/printers.py @@ -0,0 +1,301 @@ +import socket +import sys +from xml.dom import minidom + +from externals.moduleman.plugin import moduleman_plugin +from framework.ui.console.output import getTerminalSize +from framework.ui.console.common import exec_banner, term_colors +from framework.core.myexception import FuzzException + +@moduleman_plugin("header", "footer", "noresult", "result") +class magictree: + name = "magictree" + description = "Prints results in magictree format" + category = ["default"] + priority = 99 + + def __init__(self): + self.node_mt = None + self.node_service = None + + def __create_xml_element(self, parent, caption, text): + # Create a element + doc = minidom.Document() + el = doc.createElement(caption) + parent.appendChild(el) + + # Give the element some text + ptext = doc.createTextNode(text) + + el.appendChild(ptext) + return el + + def header(self, summary): + doc = minidom.Document() + + # + self.node_mt = doc.createElement("magictree") + self.node_mt.setAttribute("class", "MtBranchObject") + + # + node_td = doc.createElement("testdata") + node_td.setAttribute("class", "MtBranchObject") + self.node_mt.appendChild(node_td) + + #209.85.146.105 + host = summary.seed.history.fr_host() + if host.find(":") > 0: + host, port = host.split(":") + else: + port = 80 + if summary.seed.history.fr_schema().lower() == "https": + port = 443 + + try: + resolving = socket.gethostbyname(host) + node_h = self.__create_xml_element(node_td, "host", str(resolving)) + except socket.gaierror: + node_h = self.__create_xml_element(node_td, "host", str(host)) + + #tcp + node_ipr = self.__create_xml_element(node_h, "ipproto", "tcp") + + #80openhttp + node_port = self.__create_xml_element(node_ipr, "port", str(port)) + self.__create_xml_element(node_port, "state", "open") + if summary.seed.history.fr_schema().lower() == "https": + node_port = self.__create_xml_element(node_port, "tunnel", "ssl") + + self.node_service = self.__create_xml_element(node_port, "service", "http") + + def result(self, fuzz_result): + node_url = self.__create_xml_element(self.node_service, "url", str(fuzz_result.url)) + + if 'Server' in fuzz_result.history.fr_headers()['response']: + self.__create_xml_element(node_url, "HTTPServer", fuzz_result.history.fr_headers()['response']['Server']) + + location = "" + if 'Location' in fuzz_result.history.fr_headers()['response']: + location = fuzz_result.history.fr_headers()['response']['Location'] + + if fuzz_result.code == 301 or fuzz_result.code == 302 and location: + self.__create_xml_element(node_url, "RedirectLocation", location) + + self.__create_xml_element(node_url, "ResponseCode", str(fuzz_result.code)) + self.__create_xml_element(node_url, "source", "WFuzz") + + def footer(self, summary): + sys.stderr.write(self.node_mt.toxml()) + + def noresult(self, res): + pass + +@moduleman_plugin("header", "footer", "noresult", "result") +class html: + name = "html" + description = "Prints results in html format" + 
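+    # Illustrative usage (not part of the original patch): this printer writes the report
+    # to stderr, so it can be captured separately from the live output, e.g.
+    #   wfuzz.py -o html -z file,words.txt http://example.com/FUZZ 2> report.html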
category = ["default"]
+    priority = 99
+
+    def header(self, summary):
+        url = summary.url
+
+        sys.stderr.write("<html><head></head><body bgcolor=#000000 text=#FFFFFF><h1>Fuzzing %s</h1>\r\n<table border=\"1\">\r\n<tr><td>#request</td><td>Code</td><td>#lines</td><td>#words</td><td>Url</td></tr>\r\n" % (url))
+
+    def result(self, fuzz_result):
+        htmlc="<font>"
+
+        if fuzz_result.code >= 400 and fuzz_result.code < 500:
+            htmlc = "<font color=#FF0000>"
+        elif fuzz_result.code>=300 and fuzz_result.code < 400:
+            htmlc = "<font color=#8888FF>"
+        elif fuzz_result.code>=200 and fuzz_result.code < 300:
+            htmlc = "<font color=#00aa00>"
+
+        if fuzz_result.history.fr_method().lower() == "post":
+            inputs=""
+            for n, v in fuzz_result.history.fr_parameters()['post'].items():
+                inputs+="<input type=\"hidden\" name=\"%s\" value=\"%s\">" % (n, v)
+
+            sys.stderr.write ("<tr>\r\n<td>%05d</td>\r\n<td>%s%d</font></td>\r\n<td>%4dL</td>\r\n<td>%5dW</td>\r\n<td><table><tr><td>%s</td><td><form method=\"post\" action=\"%s\">%s<input type=submit name=b value=\"send POST\"></form></td></tr></table></td>\r\n</tr>\r\n" %(fuzz_result.nres, htmlc, fuzz_result.code, fuzz_result.lines, fuzz_result.words, fuzz_result.description, fuzz_result.url, inputs))
+        else:
+            sys.stderr.write("<tr><td>%05d</td><td>%s%d</font></td><td>%4dL</td><td>%5dW</td><td><a href=\"%s\">%s</a></td></tr>\r\n\r\n" %(fuzz_result.nres, htmlc, fuzz_result.code, fuzz_result.lines, fuzz_result.words, fuzz_result.url, fuzz_result.url))
+
+    def footer(self, summary):
+        sys.stderr.write("</table></body></html><h5>Wfuzz by EdgeSecurity</h5>
\r\n") + sys.stdout.flush() + + def noresult(self, res): + pass + +@moduleman_plugin("header", "footer", "noresult", "result") +class default: + name = "default" + description = "Default output format" + category = ["default"] + priority = 99 + + def __init__(self): + if self.kbase.has("colour"): + self.colour = self.kbase.get("colour") + else: + self.colour = False + + self.sizex, sizey = getTerminalSize() + self.written_x = 0 + + import os + self.OS = os.name + if self.OS == "nt": + import WConio + + def _erase(self): + self.written_x = 0 + sys.stdout.write ("\r") + if self.OS != 'nt': + sys.stdout.write ("\x1b[0K") + else: + WConio.clreol() + + def _write_line(self, string, line_suffix): + self.written_x += len(string) + + if self.written_x + 31 > self.sizex: + string = string[:self.sizex-31] + "...\"" + line_suffix + + sys.stdout.write(string) + + def _get_code_color(self, code): + if code == 0: + cc = term_colors.fgYellow + wc = 12 + elif code >= 400 and code < 500: + cc = term_colors.fgRed + wc = 12 + elif code >= 300 and code < 400: + cc = term_colors.fgBlue + wc = 11 + elif code >= 200 and code < 300: + cc = term_colors.fgGreen + wc = 10 + else: + cc = term_colors.fgMagenta + wc = 1 + + return (cc, wc) + + def _write(self, text, line_suffix, color = ("", 8)): + cc, wc = color + + if cc != "": + if self.OS != 'nt': + sys.stdout.write(cc) + else: + WConio.textcolor(wc) + + self._write_line(text, line_suffix) + + if wc != "": + if self.OS!='nt': + sys.stdout.write("\033[0;0m") + else: + WConio.textcolor(8) + + def _print(self, res, line_suffix): + self._erase() + + txt_color = ("", 8) if not res.is_baseline or not self.colour else (term_colors.fgCyan, 8) + + self._write("%05d: C=" % (res.nres), line_suffix, txt_color) + if res.exception: + self._write("XXX", line_suffix, self._get_code_color(res.code) if self.colour else ("",8)) + else: + self._write("%03d" % (res.code), line_suffix, self._get_code_color(res.code) if self.colour else ("",8)) + self._write(" %4d L\t %5d W\t %5d Ch\t \"%s\"%s" % (res.lines, res.words, res.chars, res.description, line_suffix), line_suffix, txt_color) + + if line_suffix != "": + for i in res.plugins_res: + print " |_ %s\r" % i.issue + + sys.stdout.flush() + + def header(self, summary): + print exec_banner + print "Target: %s\r" % summary.url + #print "Payload type: " + payloadtype + "\n" + print "Total requests: %d\r\n" % summary.total_req + print "==================================================================\r" + print "ID Response Lines Word Chars Request \r" + print "==================================================================\r\n" + + def result(self, res): + self._print(res, "\r\n") + + def noresult(self, res): + self._print(res, "") + + def footer(self, summary): + self._erase() + sys.stdout.write("\r\n") + + print "Total time: %s\r" % str(summary.totaltime)[:8] + + if summary.backfeed > 0: + print "Processed Requests: %s (%d + %d)\r" % (str(summary.processed)[:8], (summary.processed - summary.backfeed), summary.backfeed) + else: + print "Processed Requests: %s\r" % (str(summary.processed)[:8]) + print "Filtered Requests: %s\r" % (str(summary.filtered)[:8]) + print "Requests/sec.: %s\r\n" % str(summary.processed/summary.totaltime if summary.totaltime > 0 else 0)[:8] + +@moduleman_plugin("header", "footer", "noresult", "result") +class verbose(default): + name = "verbose" + description = "Results in verbose format" + category = ["default"] + priority = 99 + + def __init__(self): + default.__init__(self) + + def header(self, summary): + 
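+        # Same banner, target and request totals as the default printer's header; only
+        # the ruler below differs, adding C.Time, Server and Redirect columns.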
print exec_banner + print "Target: %s\r" % summary.url + #print "Payload type: " + payloadtype + "\n" + print "Total requests: %d\r\n" % summary.total_req + print + + print "==============================================================================================================================================\r" + print "ID C.Time Response Lines Word Chars Server Redirect Request \r" + print "==============================================================================================================================================\r\n" + + def _print(self, res, line_suffix): + self._erase() + + txt_color = ("", 8) if not res.is_baseline or not self.colour else (term_colors.fgCyan, 8) + + self._write("%05d: " % (res.nres), line_suffix, txt_color) + self._write("%.3fs C=" % (res.timer), line_suffix, txt_color) + + location = "" + if 'Location' in res.history.fr_headers()['response']: + location = res.history.fr_headers()['response']['Location'] + elif res.history.fr_url() != res.history.fr_redirect_url(): + location = "(*) %s" % res.history.fr_url() + + server = "" + if 'Server' in res.history.fr_headers()['response']: + server = res.history.fr_headers()['response']['Server'] + + if res.exception: + self._write("XXX", line_suffix, self._get_code_color(res.code) if self.colour else ("",8)) + else: + self._write("%03d" % (res.code), line_suffix, self._get_code_color(res.code) if self.colour else ("",8)) + + self._write(" %4d L\t %5d W\t %5d Ch %20.20s %51.51s \"%s\"%s" % (res.lines, res.words, res.chars, server[:17], location[:48], res.description, line_suffix), line_suffix, txt_color) + + if line_suffix != "": + for i in res.plugins_res: + print " |_ %s\r" % i.issue + + sys.stdout.flush() + diff --git a/plugins/scripts/__init__.py b/plugins/scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/plugins/scripts/cookies.py b/plugins/scripts/cookies.py new file mode 100644 index 00000000..a4fbfea5 --- /dev/null +++ b/plugins/scripts/cookies.py @@ -0,0 +1,23 @@ +from framework.plugins.api import BasePlugin + +from externals.moduleman.plugin import moduleman_plugin + +@moduleman_plugin +class cookies(BasePlugin): + name = "cookies" + description = "Looks for new cookies" + category = ["default", "passive"] + priority = 99 + + def validate(self, fuzzresult): + return True + + def process(self, fuzzresult): + new_cookies = fuzzresult.history.fr_cookies()['response'].items() + + if len(new_cookies) > 0: + for name, value in new_cookies: + + if name != "" and not self.has_kbase("cookie") or name not in self.get_kbase("cookie"): + self.add_kbase("cookie", name) + self.add_result("Cookie first set - %s=%s" % (name, value)) diff --git a/plugins/scripts/grep.py b/plugins/scripts/grep.py new file mode 100644 index 00000000..6e9e216a --- /dev/null +++ b/plugins/scripts/grep.py @@ -0,0 +1,25 @@ +import re + +from framework.plugins.api import BasePlugin +from framework.core.myexception import FuzzException +from externals.moduleman.plugin import moduleman_plugin + +@moduleman_plugin +class grep(BasePlugin): + name = "grep" + description = "Extracts the given pattern from the HTTP response. 
Parameters: grep.regex=\"\"" + category = ["passive"] + priority = 99 + + def __init__(self): + try: + self.regex = re.compile(self.get_kbase("grep.regex")[0], re.MULTILINE|re.DOTALL) + except Exception, e: + raise FuzzException(FuzzException.FATAL, "Incorrect regex or missing regex parameter.") + + def validate(self, fuzzresult): + return True + + def process(self, fuzzresult): + for r in self.regex.findall(fuzzresult.history.fr_content()): + self.add_result("Pattern match %s" % r) diff --git a/plugins/scripts/headers.py b/plugins/scripts/headers.py new file mode 100644 index 00000000..bd7820af --- /dev/null +++ b/plugins/scripts/headers.py @@ -0,0 +1,39 @@ +from framework.plugins.api import BasePlugin + +from externals.moduleman.plugin import moduleman_plugin + +@moduleman_plugin +class headers(BasePlugin): + name = "headers" + description = "Looks for server headers" + category = ["default", "passive"] + priority = 99 + + def validate(self, fuzzresult): + return True + + def process(self, fuzzresult): + serverh = "" + poweredby = "" + + if "Server" in fuzzresult.history.fr_headers()['response']: + serverh = fuzzresult.history.fr_headers()['response']["Server"] + + if "X-Powered-By" in fuzzresult.history.fr_headers()['response']: + poweredby = fuzzresult.history.fr_headers()['response']["X-Powered-By"] + + if serverh != "": + if not self.has_kbase("server"): + self.add_kbase("server", serverh) + self.add_result("Server header first set - " + serverh) + elif serverh not in self.get_kbase("server"): + self.add_kbase("server", serverh) + self.add_result("New Server header - " + serverh) + + if poweredby != "": + if not self.has_kbase("poweredby"): + self.add_kbase("poweredby", poweredby) + self.add_result("Powered-by header first set - " + poweredby) + elif poweredby not in self.get_kbase("poweredby"): + self.add_kbase("poweredby", poweredby) + self.add_result("New X-Powered-By header - " + poweredby) diff --git a/plugins/scripts/links.py b/plugins/scripts/links.py new file mode 100644 index 00000000..1ed2a88d --- /dev/null +++ b/plugins/scripts/links.py @@ -0,0 +1,57 @@ +import re +from urlparse import urlparse, urljoin + +from framework.plugins.api import DiscoveryPlugin +from framework.plugins.api import url_same_domain +from externals.moduleman.plugin import moduleman_plugin + +@moduleman_plugin +class links(DiscoveryPlugin): + name = "links" + description = "Parses HTML looking for new content. 
Optional: discovery.bl=\".txt,.gif\"" + category = ["default", "active", "discovery"] + priority = 99 + + def __init__(self): + DiscoveryPlugin.__init__(self) + + regex = [ 'href="((?!mailto:|tel:|#|javascript:).*?)"', + 'src="((?!javascript:).*?)"', + 'action="((?!javascript:).*?)"', + # http://en.wikipedia.org/wiki/Meta_refresh + '', + 'getJSON\("(.*?)"', + ] + + self.regex = [] + for i in regex: + self.regex.append(re.compile(i, re.MULTILINE|re.DOTALL)) + + def validate(self, fuzzresult): + return fuzzresult.code in [200] + + def process(self, fuzzresult): + l = [] + + #O + #ParseResult(scheme='', netloc='', path='www.owasp.org/index.php/OWASP_EU_Summit_2008', params='', query='', fragment='') + + for r in self.regex: + for i in r.findall(fuzzresult.history.fr_content()): + schema, host, path, params, variables, f = urlparse(i) + + if (not schema or schema == "http" or schema == "https") and \ + (url_same_domain(i, fuzzresult.url) or (not host and path)): + if i not in l: + l.append(i) + + # dir path + split_path = path.split("/") + newpath = '/'.join(split_path[:-1]) + "/" + self.queue_url(urljoin(fuzzresult.url, newpath)) + + # file path + u = urljoin(fuzzresult.url, i) + if not self.blacklisted_extension(u): + self.queue_url(u) + diff --git a/plugins/scripts/listing.py b/plugins/scripts/listing.py new file mode 100644 index 00000000..1edfdf7a --- /dev/null +++ b/plugins/scripts/listing.py @@ -0,0 +1,39 @@ +import re + +from framework.plugins.api import BasePlugin +from externals.moduleman.plugin import moduleman_plugin + +@moduleman_plugin +class listing(BasePlugin): + name = "listing" + description = "Looks for directory listing vulnerabilities" + category = ["default", "passive"] + priority = 99 + + def __init__(self): + dir_indexing_regexes = [] + + dir_indexing_regexes.append("Index of /") + dir_indexing_regexes.append('<a href="\\?C=N;O=D">Name</a>') + dir_indexing_regexes.append("Last modified</a>") + dir_indexing_regexes.append("Parent Directory</a>") + dir_indexing_regexes.append("Directory Listing for") + dir_indexing_regexes.append("<TITLE>Folder Listing.") + dir_indexing_regexes.append("<TITLE>Folder Listing.") + dir_indexing_regexes.append('<table summary="Directory Listing" ') + dir_indexing_regexes.append("- Browsing directory ") + dir_indexing_regexes.append('">\\[To Parent Directory\\]</a><br><br>') # IIS 6.0 and 7.0 + dir_indexing_regexes.append('<A HREF=".*?">.*?</A><br></pre><hr></body></html>') # IIS 5.0 + + self.regex = [] + for i in dir_indexing_regexes: + self.regex.append(re.compile(i,re.MULTILINE|re.DOTALL)) + + def validate(self, fuzzresult): + return fuzzresult.code in [200] + + def process(self, fuzzresult): + for r in self.regex: + if len(r.findall(fuzzresult.history.fr_content())) > 0: + self.add_result("Directory listing identified") + break diff --git a/plugins/scripts/robots.py b/plugins/scripts/robots.py new file mode 100644 index 00000000..fa9d499f --- /dev/null +++ b/plugins/scripts/robots.py @@ -0,0 +1,32 @@ +import re +from urlparse import urlparse, urljoin + + +from framework.plugins.api import DiscoveryPlugin +from framework.plugins.api import url_filename +from externals.moduleman.plugin import moduleman_plugin + +@moduleman_plugin +class robots(DiscoveryPlugin): + name = "robots" + description = "Parses robots.txt looking for new content. 
Optional: discovery.bl=\".txt,.gif\"" + category = ["default", "active", "discovery"] + priority = 99 + + def validate(self, fuzzresult): + return url_filename(fuzzresult) == "robots.txt" and fuzzresult.code == 200 + + def process(self, fuzzresult): + # Shamelessly (partially) copied from w3af's plugins/discovery/robotsReader.py + for line in fuzzresult.history.fr_content().split('\n'): + line = line.strip() + + if len(line) > 0 and line[0] != '#' and (line.upper().find('ALLOW') == 0 or\ + line.upper().find('DISALLOW') == 0 or line.upper().find('SITEMAP') == 0): + + url = line[ line.find(':') + 1 : ] + url = url.strip(" *") + + if url and not self.blacklisted_extension(url): + self.queue_url(urljoin(fuzzresult.url, url)) + diff --git a/plugins/scripts/screenshot.py b/plugins/scripts/screenshot.py new file mode 100644 index 00000000..9e3190b9 --- /dev/null +++ b/plugins/scripts/screenshot.py @@ -0,0 +1,21 @@ +from framework.plugins.api import BasePlugin +from externals.moduleman.plugin import moduleman_plugin + +import subprocess +import tempfile + +@moduleman_plugin +class screenshot(BasePlugin): + name = "screenshot" + description = "Performs a screen capture using linux cutycapt tool" + category = ["active"] + priority = 99 + + def validate(self, fuzzresult): + return fuzzresult.code in [200] + + def process(self, fuzzresult): + (fd, filename) = tempfile.mkstemp() + + subprocess.call(['cutycapt', '--url=%s' % fuzzresult.url, '--out=%s.png' % filename]) + self.add_result("Screnshot taken, output at %s.png" % filename) diff --git a/plugins/scripts/sitemap.py b/plugins/scripts/sitemap.py new file mode 100644 index 00000000..f1853da9 --- /dev/null +++ b/plugins/scripts/sitemap.py @@ -0,0 +1,33 @@ +from framework.plugins.api import DiscoveryPlugin +from framework.plugins.api import url_filename, url_same_domain +from framework.core.myexception import FuzzException +from externals.moduleman.plugin import moduleman_plugin + +import xml.dom.minidom +import urlparse + +@moduleman_plugin +class sitemap(DiscoveryPlugin): + name = "sitemap" + description = "Parses sitemap.xml file. Optional: discovery.bl=\".txt,.gif\"" + category = ["default", "active", "discovery"] + priority = 99 + + def validate(self, fuzzresult): + return url_filename(fuzzresult) == "sitemap.xml" and fuzzresult.code == 200 + + def process(self, fuzzresult): + base_url = fuzzresult.url + + try: + dom = xml.dom.minidom.parseString(fuzzresult.history.fr_content()) + except: + raise FuzzException(FuzzException.FATAL, 'Error while parsing %s.' % fuzzresult.url) + + urlList = dom.getElementsByTagName("loc") + for url in urlList: + u = url.childNodes[0].data + + if not self.blacklisted_extension(u) and url_same_domain(u, fuzzresult.url): + self.queue_url(u) + diff --git a/plugins/scripts/svn_extractor.py b/plugins/scripts/svn_extractor.py new file mode 100644 index 00000000..2b440f28 --- /dev/null +++ b/plugins/scripts/svn_extractor.py @@ -0,0 +1,109 @@ +from urlparse import urlparse, urljoin + +from framework.plugins.api import DiscoveryPlugin +from framework.core.myexception import FuzzException +from externals.moduleman.plugin import moduleman_plugin + +import tempfile +import sqlite3 + +@moduleman_plugin +class svn_extractor(DiscoveryPlugin): + name = "svn_extractor" + description = "Parses .svn/entries file. 
Optional: discovery.bl=\".txt,.gif\"" + category = ["default", "active", "discovery"] + priority = 99 + + def validate(self, fuzzresult): + return fuzzresult.url.find(".svn/entries") > 0 and fuzzresult.code == 200 + + def readsvn(self, content): + ''' + Function shamesly copied (and adapted) from https://github.com/anantshri/svn-extractor/ + Credit (C) Anant Shrivastava http://anantshri.info + ''' + old_line = "" + file_list = [] + dir_list = [] + author_list = [] + + for a in content.splitlines(): + #below functionality will find all usernames from svn entries file + if (a == "has-props"): + if not old_line in author_list: author_list.append(old_line) + if (a == "file"): + if not old_line in file_list: file_list.append(old_line) + if (a == "dir"): + if old_line != "": + dir_list.append(old_line) + old_line = a + return file_list, dir_list, author_list + + def process(self, fuzzresult): + base_url = fuzzresult.url + + file_list, dir_list, author_list = self.readsvn(fuzzresult.history.fr_content()) + + if author_list: + self.add_result("SVN authors: %s" % ', '.join(author_list)) + + for f in file_list: + u = urljoin(base_url.replace("/.svn/", "/"), f) + if not self.blacklisted_extension(u): + self.queue_url(u) + + for d in dir_list: + self.queue_url(urljoin(base_url.replace("/.svn/", "/"), d) + "/.svn/entries") + + +@moduleman_plugin +class wcdb_extractor(DiscoveryPlugin): + name = "wc_extractor" + description = "Parses subversion's wc.db file. Optional: discovery.bl=\".txt,.gif\"" + category = ["default", "active", "discovery"] + priority = 99 + + def validate(self, fuzzresult): + return fuzzresult.url.find(".svn/wc.d") > 0 and fuzzresult.code == 200 + + def readwc(self, content): + ''' + Function shamesly copied (and adapted) from https://github.com/anantshri/svn-extractor/ + Credit (C) Anant Shrivastava http://anantshri.info + ''' + author_list = [] + list_items = None + (fd, filename) = tempfile.mkstemp() + + with open(filename,"wb") as f: + f.write(content) + + conn = sqlite3.connect(filename) + c = conn.cursor() + try: + c.execute('select local_relpath, ".svn/pristine/" || substr(checksum,7,2) || "/" || substr(checksum,7) || ".svn-base" as alpha from NODES where kind="file";') + list_items = c.fetchall() + #below functionality will find all usernames who have commited atleast once. 
+ c.execute('select distinct changed_author from nodes;') + author_list = [r[0] for r in c.fetchall()] + c.close() + except Exception,e: + raise FuzzException(FuzzException.FATAL, "Error reading wc.db, either database corrupt or invalid file") + + return author_list, list_items + + def process(self, fuzzresult): + author_list, list_items = self.readwc(fuzzresult.history.fr_content()) + + if author_list: + self.add_result("SVN authors: %s" % ', '.join(author_list)) + + if list_items: + for f, pristine in list_items: + u = urljoin(fuzzresult.url.replace("/.svn/wc.db", "/"), f) + if not self.blacklisted_extension(u): + self.add_result("SVN %s source code in %s" % (f, pristine)) + self.queue_url(u) + + + diff --git a/printers.py b/printers.py deleted file mode 100644 index a260bba3..00000000 --- a/printers.py +++ /dev/null @@ -1,113 +0,0 @@ -import socket -import sys -from xml.dom import minidom - -class printer_magictree: - text = "magictree" - - def __init__(self): - self.node_mt = None - self.node_service = None - - def create_xml_element(self, parent, caption, text): - # Create a <xxx> element - doc = minidom.Document() - el = doc.createElement(caption) - parent.appendChild(el) - - # Give the <xxx> element some text - ptext = doc.createTextNode(text) - - el.appendChild(ptext) - return el - - def header(self, request): - doc = minidom.Document() - - #<magictree class="MtBranchObject"> - self.node_mt = doc.createElement("magictree") - self.node_mt.setAttribute("class", "MtBranchObject") - - #<testdata class="MtBranchObject"> - node_td = doc.createElement("testdata") - node_td.setAttribute("class", "MtBranchObject") - self.node_mt.appendChild(node_td) - - #<host>209.85.146.105 - host = request["Host"] - if host.find(":") > 0: - host, port = host.split(":") - else: - port = 80 - if request.schema.lower() == "https": - port = 443 - - try: - resolving = socket.gethostbyname(host) - node_h = self.create_xml_element(node_td, "host", str(resolving)) - except socket.gaierror: - node_h = self.create_xml_element(node_td, "host", str(host)) - - #<ipproto>tcp - node_ipr = self.create_xml_element(node_h, "ipproto", "tcp") - - #<port>80<state>open</state><service>http - node_port = self.create_xml_element(node_ipr, "port", str(port)) - self.create_xml_element(node_port, "state", "open") - if request.schema.lower() == "https": - node_port = self.create_xml_element(node_port, "tunnel", "ssl") - - self.node_service = self.create_xml_element(node_port, "service", "http") - - def result(self, nreq, fuzz_result, request): - node_url = self.create_xml_element(self.node_service, "url", str(request.completeUrl)) - - if fuzz_result.server: - self.create_xml_element(node_url, "HTTPServer", fuzz_result.server) - - if fuzz_result.code == 301 or fuzz_result.code == 302 and fuzz_result.location: - self.create_xml_element(node_url, "RedirectLocation", fuzz_result.location) - - self.create_xml_element(node_url, "ResponseCode", str(fuzz_result.code)) - self.create_xml_element(node_url, "source", "WFuzz") - - def footer(self): - sys.stderr.write(self.node_mt.toxml()) - - def error(self, nreq, request): - pass - -class printer_html: - text = "html" - - def header(self, request): - url = request.completeUrl - - sys.stderr.write("<html><head>
</head><body bgcolor=#000000 text=#FFFFFF><h1>Fuzzing %s</h1>\r\n<table border=\"1\">\r\n<tr><td>#request</td><td>Code</td><td>#lines</td><td>#words</td><td>Url</td></tr>\r\n" % (url) ) - - def result(self, nreq, fuzz_result, request): - htmlc="<font>" - - if fuzz_result.code >= 400 and fuzz_result.code < 500: - htmlc = "<font color=#FF0000>" - elif fuzz_result.code>=300 and fuzz_result.code < 400: - htmlc = "<font color=#8888FF>" - elif fuzz_result.code>=200 and fuzz_result.code < 300: - htmlc = "<font color=#00aa00>" - - if request.method.lower() == "get": - sys.stderr.write("\r\n<tr><td>%05d</td><td>%s%d</font></td><td>%4dL</td><td>%5dW</td><td><a href=%s>%s</a></td></tr>\r\n" %(nreq,htmlc,fuzz_result.code,fuzz_result.lines,fuzz_result.words,request.completeUrl,request.completeUrl)) - else: - inputs="" - postvars = request.variablesPOST() - for i in postvars: - inputs+="<input type=\"hidden\" name=\"%s\" value=\"%s\">" % (i, request.getVariablePOST(i)) - - sys.stderr.write ("\r\n<tr><td>%05d</td>\r\n<td>%s%d</font></td>\r\n<td>%4dL</td>\r\n<td>%5dW</td>\r\n<td><table><tr><td>%s</td><td><form method=\"post\" action=\"%s\">%s<input type=submit name=b value=\"send POST\"></form></td></tr></table></td>\r\n</tr>\r\n" %(nreq,htmlc,fuzz_result.code,fuzz_result.lines,fuzz_result.words,request.description,request.completeUrl,inputs)) - - def footer(self): - sys.stderr.write("</table></body></html><h5>Wfuzz by EdgeSecurity<h5>\r\n") - sys.stdout.flush() - - def error(self, nreq, request): - sys.stderr.write ("\r\n<tr><td>%05d</td><td>XXX</td><td>%4dL</td><td>%5dW</td><td><a href=%s>%s</a></td></tr>\r\n" %(nreq,0,0,request.completeUrl,"Error in "+request.completeUrl)) diff --git a/reqresp.py b/reqresp.py deleted file mode 100644 index 597ed154..00000000 --- a/reqresp.py +++ /dev/null @@ -1,713 +0,0 @@ -#Covered by GPL V2.0 -#Coded by Carlos del Ojo Elias (deepbit@gmail.com) - - -from urlparse import * -from time import gmtime, strftime -import pycurl -import gzip -import StringIO -import string -import re -import threading -import copy -from time import localtime, strftime -from datetime import date - -from xml.dom.minidom import Document - -try: - from TextParser import * -except: - pass - -mutex=1 -Semaphore_Mutex=threading.BoundedSemaphore(value=mutex) -REQLOG=False - -class Variable: - def __init__(self,name,value="",extraInfo=""): - self.name=name - self.value=value - self.initValue=value - self.extraInfo=extraInfo - - def restore(self): - self.value=self.initValue - - def change(self,newval): - self.initValue=self.value=newval - - def update(self,val): - self.value=val - - def append(self,val): - self.value+=val - - def __str__(self): - return "[ %s : %s ]" % (self.name,self.value) - -class VariablesSet: - def __init__(self): - self.variables=[] - self.boundary=None - - def names(self): - dicc=[] - for i in self.variables: - dicc.append(i.name) - - return dicc - - def existsVar(self,name): - return name in self.names() - - def addVariable(self,name,value="",extraInfo=""): - self.variables.append(Variable(name,value,extraInfo)) - - - def getVariable(self,name): - dicc=[] - for i in self.variables: - if i.name==name: - dicc.append(i) - - if len(dicc)>1: - raise Exception, "Variable exists more than one time!!! 
:D" % (name) - - if not dicc: - var=Variable(name) - self.variables.append(var) - return var - - return dicc[0] - - - def urlEncoded(self): - return "&".join(["=".join([i.name,i.value]) for i in self.variables]) - - def parseUrlEncoded(self,cad): - dicc=[] - - for i in cad.split("&"): - if i: - list=i.split("=",1) - if len (list)==1: - dicc.append(Variable(list[0],"")) - elif len (list)==2: - dicc.append(Variable(list[0],list[1])) - - self.variables=dicc - - def multipartEncoded(self): - if not self.boundary: - self.boundary="---------------------------D33PB1T0R3QR3SP0B0UND4RY2203" - pd="" - pos=0 - for i in self.variables: - pd+="--"+self.boundary+"\r\n" - pd+="%s\r\n\r\n%s\r\n" % ("\r\n".join(i.extraInfo),i.value) - pd+="--"+self.boundary+"--\r\n" - return pd - - def parseMultipart(self,cad,boundary): - self.boundary=boundary - dicc=[] - tp=TextParser() - tp.setSource("string",cad) - - while True: - headers=[] - if not tp.readUntil("name=\"([^\"]+)\""): - break - var=tp[0][0] - headers.append(tp.lastFull_line.strip()) - while True: - tp.readLine() - if tp.search("^([^:]+): (.*)$"): - headers.append(tp.lastFull_line.strip()) - else: - break - - value="" - while True: - tp.readLine() - if not tp.search(boundary): - value+=tp.lastFull_line - else: - break - - if value[-2:]=="\r\n": - value=value[:-2] - - - dicc.append(Variable(var,value,headers)) - - self.variables=dicc - - - -class Request: - - def __init__ (self): - - self.__host=None # www.google.com:80 - self.__path=None # /index.php - self.__params=None # Mierdaza de index.php;lskjflkasjflkasjfdlkasdf? - self.schema="http" # http - - ##### Variables calculadas por getters NO SE PUEDEN MODIFICAR - # self.urlWithoutPath # http://www.google.es - # self.pathWithVariables # /index.php?a=b&c=d - # self.urlWithoutVariables=None # http://www.google.es/index.php - # self.completeUrl="" # http://www.google.es/index.php?a=b - # self.finalUrl="" # Url despues de hacer el FollowLocation - # self.postdata="" # Datos por POST, toto el string - ################ - - self.ContentType="application/x-www-form-urlencoded" # None es normal encoding - self.multiPOSThead={} - - self.__variablesGET=VariablesSet() - self.__variablesPOST=VariablesSet() - - self.__headers={} # diccionario, por ejemplo headers["Cookie"] - - self.response=None # Apunta a la response que produce dicha request - - ################### lo de debajo no se deberia acceder directamente - - self.time=None # 23:00:00 - self.ip=None # 192.168.1.1 - self.method="GET" # GET o POST (EN MAYUSCULAS SI PUEDE SER) - self.protocol="HTTP/1.1" # HTTP/1.1 - self.__performHead="" - self.__performBody="" - - self.__authMethod=None - self.__userpass="" - - self.description="" # For temporally store imformation - - self.__proxy=None - self.__timeout=None - self.__totaltimeout=None - self.__finalurl="" - - self.followLocation=False - self.follow_url="" - - def __str__(self): - str="[ URL: %s" % (self.completeUrl) - if self.method=="POST": - str+=" - POST: \"%s\"" % self.postdata - if "Cookie" in self.__headers: - str+=" - COOKIE: \"%s\"" % self.__headers["Cookie"] - str+=" ]" - return str - - def getHost(self): - return self.__host - - def getXML(self,obj): - r=obj.createElement("request") - r.setAttribute("method",self.method) - url=obj.createElement("URL") - url.appendChild(obj.createTextNode(self.completeUrl)) - r.appendChild(url) - if self.method=="POST": - pd=obj.createElement("PostData") - pd.appendChild(obj.createTextNode(self.postdata)) - r.appendChild(pd) - if "Cookie" in self.__headers: - 
ck=obj.createElement("Cookie") - ck.appendChild(obj.createTextNode(self.__headers["Cookie"])) - r.appendChild(ck) - - return r - - - def __getattr__ (self,name): - if name=="urlWithoutVariables": - return urlunparse((self.schema,self.__host,self.__path,'','','')) - elif name=="pathWithVariables": - return urlunparse(('','',self.__path,'',self.__variablesGET.urlEncoded(),'')) - elif name=="completeUrl": - return urlunparse((self.schema,self.__host,self.__path,self.__params,self.__variablesGET.urlEncoded(),'')) - elif name=="finalUrl": - if self.__finalurl: - return self.__finalurl - return self.completeUrl - elif name=="urlWithoutPath": - return "%s://%s" % (self.schema,self.__headers["Host"]) - elif name=="path": - return self.__path - elif name=="postdata": - if self.ContentType=="application/x-www-form-urlencoded": - return self.__variablesPOST.urlEncoded() - elif self.ContentType=="multipart/form-data": - return self.__variablesPOST.multipartEncoded() - else: - return self.__uknPostData - else: - raise AttributeError - - def setUrl (self, urltmp): - self.__variablesGET=VariablesSet() - self.schema,self.__host,self.__path,self.__params,variables,f=urlparse(urltmp) - self.__headers["Host"]=self.__host - - if variables: - self.__variablesGET.parseUrlEncoded(variables) - -############### PROXY ################################## - def setProxy (self,prox,type): - self.__proxy=prox - self.proxytype=type - -############### FOLLOW LOCATION ######################## - def setFollowLocation(self,value): - self.followLocation=value - -############## TIMEOUTS ################################ - def setConnTimeout (self,time): - self.__timeout=time - - def setTotalTimeout (self,time): - self.__totaltimeout=time - -############## Autenticacion ########################### - def setAuth (self,method,string): - self.__authMethod=method - self.__userpass=string - - def getAuth (self): - return self.__authMethod, self.__userpass - -############## TRATAMIENTO VARIABLES GET & POST ######################### - - def existsGETVar(self,key): - return self.__variablesGET.existsVar(key) - - def existPOSTVar(self): - return self.__variablesPOST.existsVar(key) - - - def setVariablePOST (self,key,value): - self.method="POST" - v=self.__variablesPOST.getVariable(key) - v.update(value) -# self.__headers["Content-Length"]=str(len(self.postdata)) - - def setVariableGET (self,key,value): - v=self.__variablesGET.getVariable(key) - v.update(value) - - def getGETVars(self): - return self.__variablesGET.variables - - def getPOSTVars(self): - return self.__variablesPOST.variables - - def setPostData (self,pd,boundary=None): - self.__variablesPOST=VariablesSet() - self.method="POST" - if self.ContentType=="application/x-www-form-urlencoded": - self.__variablesPOST.parseUrlEncoded(pd) - elif self.ContentType=="multipart/form-data": - self.__variablesPOST.parseMultipart(pd,boundary) - else: - self.__uknPostData=pd - -############################################################################ - - def addHeader (self,key,value): - k=string.capwords(key,"-") - if k.lower() not in ["accept-encoding","content-length","if-modified-since","if-none-match"]: - self.__headers[k]=value - - def delHeader (self,key): - k=string.capwords(key,"-") - del self.__headers[k] - - def __getitem__ (self,key): - k=string.capwords(key,"-") - if k in self.__headers: - return self.__headers[k] - else: - return "" - - def __getHeaders (self): - list=[] - for i,j in self.__headers.items(): - list+=["%s: %s" % (i,j)] - return list - - def head(self): - 
conn=pycurl.Curl() - conn.setopt(pycurl.SSL_VERIFYPEER,False) - conn.setopt(pycurl.SSL_VERIFYHOST,1) - conn.setopt(pycurl.URL,self.completeUrl) - - conn.setopt(pycurl.HEADER, True) # estas dos lineas son las que importan - conn.setopt(pycurl.NOBODY, True) # para hacer un pedido HEAD - - conn.setopt(pycurl.WRITEFUNCTION, self.header_callback) - conn.perform() - - rp=Response() - rp.parseResponse(self.__performHead) - self.response=rp - - def createPath(self,newpath): - '''Creates new url from a location header || Hecho para el followLocation=true''' - if "http" in newpath[:4].lower(): - return newpath - - parts=urlparse(self.completeUrl) - if "/" != newpath[0]: - newpath="/".join(parts[2].split("/")[:-1])+"/"+newpath - - return urlunparse([parts[0],parts[1],newpath,'','','']) - - def perform(self): - global REQLOG - if REQLOG: - Semaphore_Mutex.acquire() - f=open("/tmp/REQLOG-%d-%d" % (date.today().day,date.today().month) ,"a") - f.write( strftime("\r\n\r\n############################ %a, %d %b %Y %H:%M:%S\r\n", localtime())) - f.write(self.getAll()) - f.close() - Semaphore_Mutex.release() - - - self.__performHead="" - self.__performBody="" - self.__headersSent="" - - conn=pycurl.Curl() - conn.setopt(pycurl.SSL_VERIFYPEER,False) - conn.setopt(pycurl.SSL_VERIFYHOST,1) - conn.setopt(pycurl.URL,self.completeUrl) - - if self.__authMethod or self.__userpass: - if self.__authMethod=="basic": - conn.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC) - elif self.__authMethod=="ntlm": - conn.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM) - elif self.__authMethod=="digest": - conn.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST) - conn.setopt(pycurl.USERPWD, self.__userpass) - - if self.__timeout: - conn.setopt(pycurl.CONNECTTIMEOUT, self.__timeout) - conn.setopt(pycurl.NOSIGNAL, 1) - - if self.__totaltimeout: - conn.setopt(pycurl.TIMEOUT, self.__totaltimeout) - conn.setopt(pycurl.NOSIGNAL, 1) - - conn.setopt(pycurl.WRITEFUNCTION, self.body_callback) - conn.setopt(pycurl.HEADERFUNCTION, self.header_callback) - - if self.__proxy!=None: - conn.setopt(pycurl.PROXY,self.__proxy) - if self.proxytype=="SOCK5": - conn.setopt(pycurl.PROXYTYPE,pycurl.PROXYTYPE_SOCKS5) - elif self.proxytype=="SOCK4": - conn.setopt(pycurl.PROXYTYPE,pycurl.PROXYTYPE_SOCKS4) - if self.__headers.has_key("Proxy-Connection"): - del self.__headers["Proxy-Connection"] - - conn.setopt(pycurl.HTTPHEADER,self.__getHeaders()) - if self.method=="POST": - conn.setopt(pycurl.POSTFIELDS,self.postdata) - - conn.setopt(pycurl.CUSTOMREQUEST, self.method) - if self.method == "HEAD": - conn.setopt(pycurl.NOBODY, True) - - conn.perform() - - rp=Response() - rp.parseResponse(self.__performHead) - rp.addContent(self.__performBody) - - if self.schema=="https" and self.__proxy: - self.response=Response() - self.response.parseResponse(rp.getContent()) - else: - self.response=rp - - - - if self.followLocation: - if self.response.getLocation(): - #a=Request() - a=copy.deepcopy(self) - newurl=self.createPath(self.response.getLocation()) - a.setUrl(newurl) - #url=urlparse(self.response.getLocation()) - #if not url[0] or not url[1]: - # sc=url[0] - # h=url[1] - # if not sc: - # sc=self.schema - # if not h: - # h=self.__host - # a.setUrl(urlunparse((sc,h)+url[2:])) - # self.__finalurl=urlunparse((sc,h)+url[2:]) - #else: - # a.setUrl(self.response.getLocation()) - # self.__finalurl=self.response.getLocation() - a.setProxy(self.__proxy) - - ck="" - - if "Cookie" in self.__headers: - ck=self.__headers["Cookie"] - if self.response.getCookie(): - if ck: - 
ck+=";"+self.response.getCookie() - else: - ck=self.response.getCookie() - - if ck: - self.addHeader("Cookie",ck) - - a.perform() - self.follow_url = a.completeUrl - self.response=a.response - - - - ######### ESTE conjunto de funciones no es necesario para el uso habitual de la clase - - def getAll (self): - pd=self.postdata - string=str(self.method)+" "+str(self.pathWithVariables)+" "+str(self.protocol)+"\n" - for i,j in self.__headers.items(): - string+=i+": "+j+"\n" - string+="\n"+pd - - return string - - ########################################################################## - - def header_callback(self,data): - self.__performHead+=data - - def body_callback(self,data): - self.__performBody+=data - - def Substitute(self,src,dst): - a=self.getAll() - rx=re.compile(src) - b=rx.sub(dst,a) - del rx - self.parseRequest(b,self.schema) - - def parseRequest (self,rawRequest,prot="http"): - ''' Aun esta en fase BETA y por probar''' - tp=TextParser() - tp.setSource("string",rawRequest) - - self.__variablesPOST=VariablesSet() - self.__headers={} # diccionario, por ejemplo headers["Cookie"] - - tp.readLine() - try: - tp.search("^(\w+) (.*) (HTTP\S*)$") - self.method=tp[0][0] - self.protocol=tp[0][2] - except Exception,a: - print rawRequest - raise a - - pathTMP=tp[0][1].replace(" ","%20") - pathTMP=('','')+urlparse(pathTMP)[2:] - pathTMP=urlunparse(pathTMP) - - while True: - tp.readLine() - if (tp.search("^([^:]+): (.*)$")): - self.addHeader(tp[0][0],tp[0][1]) - else: - break - - self.setUrl(prot+"://"+self.__headers["Host"]+pathTMP) - - if self.method.upper()=="POST": - - pd="" - while tp.readLine(): - pd+=tp.lastFull_line - - boundary=None - if "Content-Type" in self.__headers: - values=self.__headers["Content-Type"].split(";") - self.ContentType=values[0].strip().lower() - if self.ContentType=="multipart/form-data": - boundary=values[1].split("=")[1].strip() - - self.setPostData(pd,boundary) - - -class Response: - - def __init__ (self,protocol="",code="",message=""): - self.protocol=protocol # HTTP/1.1 - self.code=code # 200 - self.message=message # OK - self.__headers=[] # bueno pues las cabeceras igual que en la request - self.__content="" # contenido de la response (si i solo si Content-Length existe) - self.md5="" # hash de los contenidos del resultado - self.charlen="" # Cantidad de caracteres de la respuesta - - def addHeader (self,key,value): - k=string.capwords(key,"-") - self.__headers+=[(k,value)] - - def delHeader (self,key): - for i in self.__headers: - if i[0].lower()==key.lower(): - self.__headers.remove(i) - - - def addContent (self,text): - self.__content=self.__content+text - - def __getitem__ (self,key): - for i,j in self.__headers: - if key==i: - return j - print "Error al obtener header!!!" 
- - def getCookie (self): - str=[] - for i,j in self.__headers: - if i.lower()=="set-cookie": - str.append(j.split(";")[0]) - return "; ".join(str) - - - def has_header (self,key): - for i,j in self.__headers: - if i.lower()==key.lower(): - return True - return False - - def getLocation (self): - for i,j in self.__headers: - if i.lower()=="location": - return j - return None - - def header_equal (self,header,value): - for i,j in self.__headers: - if i==header and j.lower()==value.lower(): - return True - return False - - def getHeaders (self): - return self.__headers - - - def getContent (self): - return self.__content - - def getTextHeaders(self): - string=str(self.protocol)+" "+str(self.code)+" "+str(self.message)+"\r\n" - for i,j in self.__headers: - string+=i+": "+j+"\r\n" - - return string - - def getAll (self): - string=self.getTextHeaders()+"\r\n"+self.getContent() - return string - - def Substitute(self,src,dst): - a=self.getAll() - b=a.replace(src,dst) - self.parseResponse(b) - - def getAll_wpost (self): - string=str(self.protocol)+" "+str(self.code)+" "+str(self.message)+"\r\n" - for i,j in self.__headers: - string+=i+": "+j+"\r\n" - return string - - - def parseResponse (self,rawResponse,type="curl"): - self.__content="" - self.__headers=[] - - tp=TextParser() - tp.setSource("string",rawResponse) - - while True: - tp.readUntil("(HTTP\S*) ([0-9]+)") - - try: - self.protocol=tp[0][0] - except: - self.protocol="unknown" - - try: - self.code=tp[0][1] - except: - self.code="0" - - if self.code!="100": - break - - - self.code=int(self.code) - - while True: - tp.readLine() - if (tp.search("^([^:]+): ?(.*)$")): - self.addHeader(tp[0][0],tp[0][1]) - else: - break - - while tp.skip(1): - self.addContent(tp.lastFull_line) - - if type=='curl': - self.delHeader("Transfer-Encoding") - - if self.header_equal("Transfer-Encoding","chunked"): - result="" - content=StringIO.StringIO(self.__content) - hexa=content.readline() - nchunk=int(hexa.strip(),16) - - while nchunk: - result+=content.read(nchunk) - content.readline() - hexa=content.readline() - nchunk=int(hexa.strip(),16) - - self.__content=result - - if self.header_equal("Content-Encoding","gzip"): - compressedstream = StringIO.StringIO(self.__content) - gzipper = gzip.GzipFile(fileobj=compressedstream) - body=gzipper.read() - self.__content=body - self.delHeader("Content-Encoding") - - - -class ReqrespException(Exception): - def __init__ (self,value): - self.__value=value - - def __str__ (self): - return self.GetError() - - def GetError(self): - if self.__value==1: - return "Attribute not modificable" diff --git a/wfuzz_bash_completion b/wfuzz_bash_completion deleted file mode 100644 index 6a8ca7a8..00000000 --- a/wfuzz_bash_completion +++ /dev/null @@ -1,49 +0,0 @@ -# wfuzz bash completion file -# by Xavier Mendez (xavi.mendez@gmail.com) aka Javi - -_wfuzz() { - - COMPREPLY=() - local cur prev - cur=${COMP_WORDS[COMP_CWORD]} - prev=${COMP_WORDS[COMP_CWORD-1]} - - # Change to your wordlists' base directory - WLDIR=~/herramientas/fuzzdb-read-only/ - - common_options="-z[PAYLOAD] -f[WORDLIST] --hc[HIDE_HTTP_CODES] -d[POST_DATA] " - - case "$prev" in - -z) - COMPREPLY=( $( compgen -W "file, range, hexa-range, hexa-rand, list," -- $cur )) - ;; - file,) - [ -z "$WLDIR" ] && exit 1 - - if [[ "$cur" == "$WLDIR" ]]; then - local names=$(find $WLDIR -type d | grep -v '\/\(\.svn\|docs\|web-backdoors\|regex\)') - else - local names=$(find $WLDIR -type f -iname "*.txt") - fi - - COMPREPLY=( $(compgen -W "${names}" -- ${cur}) ) - ;; - 
range|hexa-range|hexa-rand) - COMPREPLY=( $(compgen -W "-r" -- ${cur}) ) - ;; - --digest|--ntlm|--basic) - COMPREPLY=( $(compgen -W "FUZZ:FUZZ" -- ${cur}) ) - ;; - --hc) - COMPREPLY=( $( compgen -W "400 401 301 302 500 404 200" -- $cur ) ) - ;; - -e) - COMPREPLY=( $( compgen -W "help urlencode double_nibble_hexa mssql_char binary_ascii html_encoder_hexa utf8 base64 uri_hexadecimal double_urlencode utf8_binary uri_unicode sha1 mysql_char random_uppercase html_encoder oracle_char html_encoder_decimal md5" -- $cur ) ) - ;; - *) - COMPREPLY=( $( compgen -W "-I -v -z -f -c -x -d -H -r -t -e -b -R -V --basic --ntlm --digest --hc --hl --hw --hh --hs --html --magictree" -- $cur ) ) - ;; - esac -} - -complete -F _wfuzz -o default wfuzz.py diff --git a/wordlist/general/http_methods.txt b/wordlist/general/http_methods.txt new file mode 100644 index 00000000..e346904a --- /dev/null +++ b/wordlist/general/http_methods.txt @@ -0,0 +1,32 @@ +GET +NOEXISTE +POST +HEAD +PUT +TRACE +TRACK +SEARCH +INDEX +OPTIONS +DELETE +CONNECT +PATCH +PROPFIND +PROPPATCH +MKCOL +COPY +MOVE +LOCK +UNLOCK +CHECKOUT +SHOWMETHOD +LINK +UNLINK +CHECKIN +TEXTSEARCH +SPACEJUMP +ORDERPATCH +ACL +VERSION-CONTROL +REPORT +UNCHECKOUT
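
Note on extending the plugin set imported above: new checks drop into plugins/scripts/ and follow the same shape as cookies.py, grep.py or listing.py. The sketch below is illustrative only and is not part of this patch; it assumes nothing beyond the BasePlugin interface already visible in the diff (validate(), process(), add_result() and fuzzresult.history.fr_content()), and the plugin name and regex are invented for the example.

from framework.plugins.api import BasePlugin
from externals.moduleman.plugin import moduleman_plugin

import re

@moduleman_plugin
class title_grep(BasePlugin):
    name = "title_grep"
    description = "Example only: reports the HTML title of each successful response"
    category = ["passive"]
    priority = 99

    def __init__(self):
        # compiled once per run, as grep.py above does with its kbase-supplied pattern
        self.regex = re.compile("<title>(.*?)</title>", re.IGNORECASE | re.MULTILINE | re.DOTALL)

    def validate(self, fuzzresult):
        # only inspect successful responses, mirroring links.py and listing.py
        return fuzzresult.code in [200]

    def process(self, fuzzresult):
        for title in self.regex.findall(fuzzresult.history.fr_content()):
            self.add_result("Page title - %s" % title.strip())

As with the other scripts, the class only needs an __init__ when it has state to set up; any parameters would be read from the kbase the same way grep.py reads grep.regex.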