1 change: 1 addition & 0 deletions .gitignore
@@ -89,6 +89,7 @@ venv/
ENV/
env.bak/
venv.bak/
.vscode/

# Spyder project settings
.spyderproject
12 changes: 9 additions & 3 deletions README.md
@@ -13,11 +13,17 @@ Vulnerability scanner for OFX servers.

## Usage

`./ofxpostern.py [-f FID] [-o ORG] url`
`./ofxpostern.py [-f FID] [-o ORG] [--proxy proxy_url] url`

Example:
Examples:
```bash
./ofxpostern.py -o Cavion -f 11135 https://ofx.lanxtra.com/ofx/servlet/Teller
```

`./ofxpostern.py -o Cavion -f 11135 https://ofx.lanxtra.com/ofx/servlet/Teller`
Using a web proxy (such as Burp Suite or Fiddler) to intercept and inspect traffic on the wire:
```bash
./ofxpostern.py -o Cavion -f 11135 --proxy http://127.0.0.1:8080 https://ofx.lanxtra.com/ofx/servlet/Teller
```

The Financial Identifier (FID) and Organization (ORG) are sometimes optional and sometimes required, depending on the Financial Institution.
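
If the institution does not require them, both flags can be omitted; the URL below is only a placeholder:
```bash
./ofxpostern.py https://ofx.example.com/ofx
```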

14 changes: 10 additions & 4 deletions ofxpostern.py
@@ -140,7 +140,7 @@ def print_list(lst, indent=0):
# Core Logic
#

def send_req(server, req_name):
def send_req(server, req_name, proxy_url=None):
'''
Send request to the OFX server.
'''
@@ -161,7 +161,8 @@ def send_req(server, req_name):
cached = False

if not cache or not cached:
otc = testofx.OFXTestClient(output=debug, tls_verify=server.get_tls())
otc = testofx.OFXTestClient(output=debug, tls_verify=server.get_tls(),
proxy_url=proxy_url)
res = otc.send_req(req_name, server)

# Store result for analysis
@@ -506,7 +507,12 @@ def main():
action='store_false',
help='Skip TLS verification',
required=False)
parser.set_defaults(tls_verify=True)
parser.add_argument('--proxy',
dest='proxy_url',
action='store',
help='Use an intercepting proxy, such as Burp Suite',
required=False)
parser.set_defaults(tls_verify=True, proxy_url=None)
args = parser.parse_args()

print_debug(args)
@@ -534,7 +540,7 @@ def main():
check_tls(server, args.tls_verify)
for req_name in requests:
print(' Sending {}'.format(req_name))
send_req(server, req_name)
send_req(server, req_name, proxy_url=args.proxy_url)
print(' Analysing Server')
try:
profrs = testofx.OFXFile(req_results[testofx.REQ_NAME_OFX_PROFILE].text)
51 changes: 37 additions & 14 deletions testofx.py
@@ -382,14 +382,16 @@ def __init__(self,
use_cache=False,
output=False,
version='102',
tls_verify=True
tls_verify=True,
proxy_url=None
):
self.timeout = timeout
self.wait = wait
self.use_cache = use_cache
self._output=output
self.version = version
self.tls_verify = tls_verify
self.proxy_url = proxy_url

if self.version[0] == '1':
self.ofxheader = OFX_HEADER_100.format(version=self.version)
@@ -427,21 +429,42 @@ def call_url_cached(self, url, tls_verify, body, method):

if self._output: print("{}".format(url))
try:
if self.proxy_url:
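# When proxying, route both HTTP and HTTPS requests through the
# intercepting proxy. Certificate verification is disabled below because
# intercepting proxies such as Burp Suite re-sign TLS traffic with their
# own CA certificate.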
s = requests.Session()
s.proxies = {'http': self.proxy_url, 'https': self.proxy_url}

if method == 'GET':
r = requests.get(
url,
headers=headers,
timeout=self.timeout,
verify=tls_verify
)
if self.proxy_url:
r = s.get(
url,
headers=headers,
timeout=self.timeout,
verify=False
)
else:
r = requests.get(
url,
headers=headers,
timeout=self.timeout,
verify=tls_verify
)
elif method == 'POST':
r = requests.post(
url,
headers=headers,
timeout=self.timeout,
verify=tls_verify,
data=body
)
if self.proxy_url:
r = s.post(
url,
headers=headers,
timeout=self.timeout,
verify=False,
data=body
)
else:
r = requests.post(
url,
headers=headers,
timeout=self.timeout,
verify=tls_verify,
data=body
)
if self.use_cache:
self.cache[url] = r
return (r, False)