Fix crash on saving endpoint (FFUF related only) #1063

Merged · 9 commits · Nov 27, 2023
Add check & exception catch while saving endpoint
psyray committed Nov 22, 2023
commit e5bf0c4b7831bb4d4d0c94926bd2c845bda727b6
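Before the diff itself, a condensed picture of what the commit defends against: FFUF can hand back URLs that fail reNgine's `validators.url()` check or that make `EndPoint.objects.get_or_create()` raise (a duplicate-row or column-size error are plausible examples, though the PR does not name the exact exception). The sketch below is a minimal, self-contained illustration of the pattern, not reNgine code: `FakeEndpointStore`, the column limit, and the example URLs are all made up.

```python
"""Standalone sketch of the pattern this commit applies (illustrative only).

FakeEndpointStore stands in for Django's EndPoint.objects, and the URL check
stands in for validators.url(); only the shape -- validate, then try/except
around get_or_create, then a (None, False) fallback that the caller must
check -- mirrors the real diff below.
"""
import logging
from urllib.parse import urlparse

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class FakeEndpointStore:
	"""Stand-in for EndPoint.objects; can raise, just like the ORM."""

	def __init__(self):
		self._rows = {}

	def get_or_create(self, http_url):
		if len(http_url) > 2000:  # pretend database column limit
			raise ValueError(f'URL too long ({len(http_url)} chars)')
		created = http_url not in self._rows
		row = self._rows.setdefault(http_url, {'http_url': http_url})
		return row, created


STORE = FakeEndpointStore()


def save_endpoint(http_url):
	"""Return (endpoint, created) and never raise to the caller."""
	parsed = urlparse(http_url)
	if not (parsed.scheme and parsed.netloc):  # stands in for validators.url()
		return None, False
	try:
		return STORE.get_or_create(http_url)
	except Exception as e:  # same broad catch as the diff below
		logger.error(e)
		return None, False


# Call-site guard, mirroring the dir_file_fuzz hunk: only touch the
# endpoint when one was actually saved.
for url in ('https://example.com/admin', 'not a url', 'https://example.com/' + 'A' * 3000):
	endpoint, created = save_endpoint(url)
	if endpoint:
		endpoint['http_status'] = 200
```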
web/reNgine/tasks.py — 79 changes: 43 additions & 36 deletions
@@ -1666,37 +1666,38 @@ def dir_file_fuzz(self, ctx={}, description=None):
 				logger.error(f'FUZZ not found for "{url}"')
 				continue
 			endpoint, created = save_endpoint(url, crawl=False, ctx=ctx)
-			# endpoint.is_default = False
-			endpoint.http_status = status
-			endpoint.content_length = length
-			endpoint.response_time = duration / 1000000000
-			endpoint.save()
-			if created:
-				urls.append(endpoint.http_url)
-			endpoint.status = status
-			endpoint.content_type = content_type
-			endpoint.content_length = length
-			dfile, created = DirectoryFile.objects.get_or_create(
-				name=name,
-				length=length,
-				words=words,
-				lines=lines,
-				content_type=content_type,
-				url=url)
-			dfile.http_status = status
-			dfile.save()
-			# if created:
-			# logger.warning(f'Found new directory or file {url}')
-			dirscan.directory_files.add(dfile)
-			dirscan.save()
-
-			if self.subscan:
-				dirscan.dir_subscan_ids.add(self.subscan)
-
-			subdomain_name = get_subdomain_from_url(endpoint.http_url)
-			subdomain = Subdomain.objects.get(name=subdomain_name, scan_history=self.scan)
-			subdomain.directories.add(dirscan)
-			subdomain.save()
+			if endpoint:
+				# endpoint.is_default = False
+				endpoint.http_status = status
+				endpoint.content_length = length
+				endpoint.response_time = duration / 1000000000
+				endpoint.save()
+				if created:
+					urls.append(endpoint.http_url)
+				endpoint.status = status
+				endpoint.content_type = content_type
+				endpoint.content_length = length
+				dfile, created = DirectoryFile.objects.get_or_create(
+					name=name,
+					length=length,
+					words=words,
+					lines=lines,
+					content_type=content_type,
+					url=url)
+				dfile.http_status = status
+				dfile.save()
+				# if created:
+				# logger.warning(f'Found new directory or file {url}')
+				dirscan.directory_files.add(dfile)
+				dirscan.save()
+
+				if self.subscan:
+					dirscan.dir_subscan_ids.add(self.subscan)
+
+				subdomain_name = get_subdomain_from_url(endpoint.http_url)
+				subdomain = Subdomain.objects.get(name=subdomain_name, scan_history=self.scan)
+				subdomain.directories.add(dirscan)
+				subdomain.save()

 	# Crawl discovered URLs
 	if enable_http_crawl:
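The `if endpoint:` guard above is the actual crash fix: `save_endpoint()` already returned `(None, False)` for URLs that fail validation (and, after this commit, also when `get_or_create` raises), yet the old loop wrote to `endpoint.http_status` unconditionally. A minimal illustration of that failure mode (hypothetical values, not reNgine code):

```python
# What the unguarded code path did when save_endpoint gave nothing back.
status = 200
endpoint, created = None, False  # save_endpoint's failure return value
try:
	endpoint.http_status = status  # old, unguarded assignment
except AttributeError as e:
	print(e)  # 'NoneType' object has no attribute 'http_status'
```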
@@ -4495,11 +4496,17 @@ def save_endpoint(
 	if not validators.url(http_url):
 		return None, False
 	http_url = sanitize_url(http_url)
-	endpoint, created = EndPoint.objects.get_or_create(
-		scan_history=scan,
-		target_domain=domain,
-		http_url=http_url,
-		**endpoint_data)
+	if os.environ.get('DEBUG'):
+		logger.warning(f'================== URL : {http_url} ==================')
+	try:
+		endpoint, created = EndPoint.objects.get_or_create(
+			scan_history=scan,
+			target_domain=domain,
+			http_url=http_url,
+			**endpoint_data)
+	except Exception as e:
+		logger.error(e)
+		return None, False

 	if created:
 		endpoint.is_default = is_default
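A small side note on the new debug banner: it is keyed on the presence of a `DEBUG` environment variable via `os.environ.get('DEBUG')`, which is truthy for any non-empty string, including `"0"`. The snippet below just illustrates that gate in isolation; the URL is a made-up example, and how reNgine populates `DEBUG` for its Celery workers is not shown in this diff.

```python
import logging
import os

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

http_url = 'https://example.com/backup.zip'  # hypothetical FFUF hit
if os.environ.get('DEBUG'):  # set DEBUG=1 (or anything non-empty) to see the banner
	logger.warning(f'================== URL : {http_url} ==================')
```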