bbot/core/helpers/web/engine.py (17 additions & 11 deletions)
@@ -81,15 +81,20 @@ async def request(self, *args, **kwargs):
         if client_kwargs:
             client = self.AsyncClient(**client_kwargs)

-        async with self._acatch(url, raise_error):
-            if self.http_debug:
-                log.trace(f"Web request: {str(args)}, {str(kwargs)}")
-            response = await client.request(*args, **kwargs)
-            if self.http_debug:
-                log.trace(
-                    f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}"
-                )
-            return response
+        try:
+            async with self._acatch(url, raise_error):
+                if self.http_debug:
+                    log.trace(f"Web request: {str(args)}, {str(kwargs)}")
+                response = await client.request(*args, **kwargs)
+                if self.http_debug:
+                    log.trace(
+                        f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}"
+                    )
+                return response
+        except httpx.HTTPError as e:
+            if raise_error:
+                _response = getattr(e, "response", None)
+                return {"_request_error": str(e), "_response": _response}

     async def request_batch(self, urls, threads=10, **kwargs):
         async for (args, _, _), response in self.task_pool(
@@ -105,8 +110,8 @@ async def request_custom_batch(self, urls_and_kwargs, threads=10, **kwargs):

     async def download(self, url, **kwargs):
         warn = kwargs.pop("warn", True)
-        raise_error = kwargs.pop("raise_error", False)
         filename = kwargs.pop("filename")
+        raise_error = kwargs.get("raise_error", False)
         try:
             result = await self.stream_request(url, **kwargs)
             if result is None:
@@ -123,7 +128,8 @@ async def download(self, url, **kwargs):
                 log_fn = log.warning
             log_fn(f"Failed to download {url}: {e}")
             if raise_error:
-                raise
+                _response = getattr(e, "response", None)
+                return {"_download_error": str(e), "_response": _response}

     async def stream_request(self, url, **kwargs):
         follow_redirects = kwargs.pop("follow_redirects", True)
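The engine-side change above replaces the bare re-raise with a catch-and-return: httpx.HTTPError is caught and, when the caller asked for raise_error, the failure is handed back as a plain dict under the _request_error / _download_error keys. Since run_and_return suggests these calls cross an engine boundary where exceptions cannot simply propagate, a serializable dict lets the caller decide what to raise. A minimal sketch of that pattern, with a hypothetical engine_request function standing in for the real method:

```python
# Minimal sketch of the engine-side pattern (hypothetical function name).
# Instead of letting the exception escape, the failure is packaged into a
# plain dict that survives whatever transport sits between engine and caller.
import httpx


async def engine_request(url, raise_error=False, **kwargs):
    async with httpx.AsyncClient() as client:
        try:
            return await client.request("GET", url, **kwargs)
        except httpx.HTTPError as e:
            if raise_error:
                # HTTPStatusError carries a response; connect/timeout errors do not,
                # hence the getattr with a None default.
                return {"_request_error": str(e), "_response": getattr(e, "response", None)}
            # mirror the documented raise_error=False behaviour: swallow and return None
            return None
```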
bbot/core/helpers/web/web.py (21 additions & 2 deletions)
@@ -121,7 +121,16 @@ async def request(self, *args, **kwargs):
         Note:
             If the web request fails, it will return None unless `raise_error` is `True`.
         """
-        return await self.run_and_return("request", *args, **kwargs)
+        raise_error = kwargs.get("raise_error", False)
+        result = await self.run_and_return("request", *args, **kwargs)
+        if isinstance(result, dict) and "_request_error" in result:
+            if raise_error:
+                error_msg = result["_request_error"]
+                response = result["_response"]
+                error = self.ERROR_CLASS(error_msg)
+                error.response = response
+                raise error
+        return result

     async def request_batch(self, urls, *args, **kwargs):
         """
@@ -199,6 +208,7 @@ async def download(self, url, **kwargs):
             >>> filepath = await self.helpers.download("https://www.evilcorp.com/passwords.docx", cache_hrs=24)
         """
         success = False
+        raise_error = kwargs.get("raise_error", False)
         filename = kwargs.pop("filename", self.parent_helper.cache_filename(url))
         filename = truncate_filename(Path(filename).resolve())
         kwargs["filename"] = filename
@@ -211,7 +221,16 @@ async def download(self, url, **kwargs):
             log.debug(f"{url} is cached at {self.parent_helper.cache_filename(url)}")
             success = True
         else:
-            success = await self.run_and_return("download", url, **kwargs)
+            result = await self.run_and_return("download", url, **kwargs)
+            if isinstance(result, dict) and "_download_error" in result:
+                if raise_error:
+                    error_msg = result["_download_error"]
+                    response = result["_response"]
+                    error = self.ERROR_CLASS(error_msg)
+                    error.response = response
+                    raise error
+            elif result:
+                success = True

         if success:
             return filename
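On the helper side, request() and download() now inspect the value coming back from run_and_return: if it is the sentinel dict, they rebuild an exception from self.ERROR_CLASS (caught as WebError in the tests below), reattach the original response, and raise it only when raise_error was requested. A rough sketch of that unwrapping step, using hypothetical names (unwrap_engine_result, a local WebError standing in for ERROR_CLASS):

```python
# Rough caller-side sketch (hypothetical names; ERROR_CLASS is assumed to behave like WebError).
class WebError(Exception):
    """Stand-in for the helper's ERROR_CLASS."""


def unwrap_engine_result(result, error_key="_request_error", raise_error=False):
    # The engine only produces the sentinel dict when raise_error was requested,
    # so normal responses (or None on a swallowed failure) pass straight through.
    if isinstance(result, dict) and error_key in result:
        if raise_error:
            error = WebError(result[error_key])
            # reattach the original httpx response (may be None for connection errors)
            # so callers can check e.response.status_code, headers, etc.
            error.response = result["_response"]
            raise error
    return result
```

This is what makes assertions like e.response.status_code == 500 in the updated tests possible without changing the helpers' public signatures.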
bbot/test/test_step_1/test_web.py (34 additions & 2 deletions)
@@ -1,4 +1,5 @@
 import re
+import httpx

 from ..bbot_fixtures import *

@@ -13,6 +14,7 @@ def server_handler(request):

     base_url = bbot_httpserver.url_for("/test/")
     bbot_httpserver.expect_request(uri=re.compile(r"/test/\d+")).respond_with_handler(server_handler)
+    bbot_httpserver.expect_request(uri=re.compile(r"/nope")).respond_with_data("nope", status=500)

     scan = bbot_scanner()

@@ -49,15 +51,45 @@ def server_handler(request):
     assert response.text.startswith(f"{url}: ")
     assert f"H{custom_tracker}: v{custom_tracker}" in response.text

+    # request with raise_error=True
+    with pytest.raises(WebError):
+        await scan.helpers.request("http://www.example.com/", raise_error=True)
+    try:
+        await scan.helpers.request("http://www.example.com/", raise_error=True)
+    except WebError as e:
+        assert hasattr(e, "response")
+        assert e.response is None
+    with pytest.raises(httpx.HTTPStatusError):
+        response = await scan.helpers.request(bbot_httpserver.url_for("/nope"), raise_error=True)
+        response.raise_for_status()
+    try:
+        response = await scan.helpers.request(bbot_httpserver.url_for("/nope"), raise_error=True)
+        response.raise_for_status()
+    except httpx.HTTPStatusError as e:
+        assert hasattr(e, "response")
+        assert e.response.status_code == 500
+
     # download
     url = f"{base_url}999"
     filename = await scan.helpers.download(url)
     file_content = open(filename).read()
     assert file_content.startswith(f"{url}: ")

-    # raise_error=True
+    # download with raise_error=True
     with pytest.raises(WebError):
-        await scan.helpers.request("http://www.example.com/", raise_error=True)
+        await scan.helpers.download("http://www.example.com/", raise_error=True)
+    try:
+        await scan.helpers.download("http://www.example.com/", raise_error=True)
+    except WebError as e:
+        assert hasattr(e, "response")
+        assert e.response is None
+    with pytest.raises(WebError):
+        await scan.helpers.download(bbot_httpserver.url_for("/nope"), raise_error=True)
+    try:
+        await scan.helpers.download(bbot_httpserver.url_for("/nope"), raise_error=True)
+    except WebError as e:
+        assert hasattr(e, "response")
+        assert e.response.status_code == 500

     await scan._cleanup()
