Skip to content

Commit

Permalink
separate fetch API for autofetch behavior + additional improvements …
Browse files Browse the repository at this point in the history
…on partial responses: (#736)

Chromium now interrupts fetch() if abort() is called or the page is
navigated, so autofetch behavior using the native fetch() is less than
ideal. This PR adds support for a __bx_fetch() command for autofetch
behavior (supported in browsertrix-behaviors 0.6.6) to fetch separately
from the browser's regular fetch()
- __bx_fetch() starts a fetch, but does not return content to browser,
doesn't need abort(), unaffected by page navigation, but will still try
to use browser network stack when possible, making it more efficient for
background fetching.
- if network stack fetch fails, fallback to regular node fetch() in the
crawler.
Additional improvements for interrupted fetch:
- don't store truncated media responses, even for 200
- avoid doing duplicate async fetching if the response has already been
handled (e.g. a fetch handled in multiple contexts)
- fixes #735, where an interrupted fetch resulted in an empty response
  • Loading branch information
ikreymer authored Dec 31, 2024
1 parent fb8ed18 commit d923e11
Show file tree
Hide file tree
Showing 8 changed files with 90 additions and 47 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ jobs:
run: yarn add -D http-server

- name: install py-wacz as root for tests
run: sudo pip install wacz
run: sudo pip install wacz --ignore-installed

- name: run all tests as root
run: sudo DOCKER_HOST_NAME=172.17.0.1 CI=true yarn test -validate
Expand Down
6 changes: 3 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"dependencies": {
"@novnc/novnc": "1.4.0",
"@webrecorder/wabac": "^2.20.7",
"browsertrix-behaviors": "^0.6.5",
"browsertrix-behaviors": "^0.6.6",
"client-zip": "^2.4.5",
"css-selector-parser": "^3.0.5",
"fetch-socks": "^1.3.0",
Expand All @@ -37,7 +37,7 @@
"tsc": "^2.0.4",
"undici": "^6.18.2",
"uuid": "8.3.2",
"warcio": "^2.4.2",
"warcio": "^2.4.3",
"ws": "^7.4.4",
"yargs": "^17.7.2"
},
Expand Down Expand Up @@ -67,7 +67,7 @@
},
"resolutions": {
"wrap-ansi": "7.0.0",
"warcio": "^2.4.2",
"warcio": "^2.4.3",
"@novnc/novnc": "1.4.0"
}
}
6 changes: 6 additions & 0 deletions src/crawler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ import { Browser } from "./util/browser.js";
import {
ADD_LINK_FUNC,
BEHAVIOR_LOG_FUNC,
FETCH_FUNC,
DISPLAY,
ExtractSelector,
PAGE_OP_TIMEOUT_SECS,
Expand Down Expand Up @@ -693,6 +694,7 @@ export class Crawler {
cdp,
workerid,
callbacks,
recorder,
frameIdToExecId,
}: WorkerOpts) {
await this.browser.setupPage({ page, cdp });
Expand Down Expand Up @@ -767,6 +769,10 @@ self.__bx_behaviors.selectMainBehavior();
this.behaviorsChecked = true;
}

await page.exposeFunction(FETCH_FUNC, (url: string) => {
return recorder ? recorder.addExternalFetch(url, cdp) : true;
});

await this.browser.addInitScript(page, initScript);
}
}
Expand Down
2 changes: 2 additions & 0 deletions src/util/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ export const EXTRACT_TEXT_TYPES = ["to-pages", "to-warc", "final-to-warc"];

export const BEHAVIOR_LOG_FUNC = "__bx_log";
export const ADD_LINK_FUNC = "__bx_addLink";
export const FETCH_FUNC = "__bx_fetch";

export const MAX_DEPTH = 1000000;

export const FETCH_HEADERS_TIMEOUT_SECS = 30;
Expand Down
96 changes: 61 additions & 35 deletions src/util/recorder.ts
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ export class Recorder {
pendingRequests!: Map<string, RequestResponseInfo>;
skipIds!: Set<string>;
pageInfo!: PageInfoRecord;
mainFrameId: string | null = null;
skipRangeUrls!: Map<string, number>;

swTargetId?: string | null;
Expand Down Expand Up @@ -449,10 +450,14 @@ export class Recorder {
{ url, ...this.logDetails },
"recorder",
);
reqresp.deleteRange();
reqresp.requestId = "0";

const fetcher = new AsyncFetcher({
reqresp,
expectedSize: reqresp.expectedSize ? reqresp.expectedSize : -1,
recorder: this,
networkId: requestId,
networkId: "0",
});
void this.fetcherQ.add(() => fetcher.load());
return;
Expand Down Expand Up @@ -574,6 +579,12 @@ export class Recorder {

const networkId = params.networkId || requestId;

const reqresp = this.pendingReqResp(networkId);

if (!reqresp) {
return false;
}

if (responseErrorReason) {
logger.warn(
"Skipping failed response",
Expand Down Expand Up @@ -601,24 +612,23 @@ export class Recorder {
);
this.removeReqResp(networkId);

const reqresp = new RequestResponseInfo("0");
reqresp.fillRequest(params.request, params.resourceType);
if (reqresp.requestHeaders) {
delete reqresp.requestHeaders["range"];
delete reqresp.requestHeaders["Range"];
if (!reqresp.fetchContinued) {
const reqrespNew = new RequestResponseInfo("0");
reqrespNew.fillRequest(params.request, params.resourceType);
reqrespNew.deleteRange();
reqrespNew.frameId = params.frameId;

this.addAsyncFetch(
{
reqresp: reqrespNew,
expectedSize: parseInt(range.split("/")[1]),
recorder: this,
networkId: "0",
cdp,
},
contentLen,
);
}
reqresp.frameId = params.frameId;

this.addAsyncFetch(
{
reqresp,
expectedSize: parseInt(range.split("/")[1]),
recorder: this,
networkId: "0",
cdp,
},
contentLen,
);

return false;
} else {
Expand Down Expand Up @@ -651,27 +661,23 @@ export class Recorder {
"recorder",
);

const reqresp = new RequestResponseInfo("0");
reqresp.fillRequest(params.request, params.resourceType);
reqresp.url = filteredUrl;
reqresp.frameId = params.frameId;
if (!reqresp.fetchContinued) {
const reqrespNew = new RequestResponseInfo("0");
reqrespNew.fillRequest(params.request, params.resourceType);
reqrespNew.url = filteredUrl;
reqrespNew.frameId = params.frameId;

this.addAsyncFetch({
reqresp,
recorder: this,
networkId: "0",
cdp,
});
this.addAsyncFetch({
reqresp: reqrespNew,
recorder: this,
networkId: "0",
cdp,
});
}
return false;
}
}

const reqresp = this.pendingReqResp(networkId);

if (!reqresp) {
return false;
}

// indicate that this is intercepted in the page context
if (!isBrowserContext) {
reqresp.inPageContext = true;
Expand All @@ -696,6 +702,7 @@ export class Recorder {
});
this.pageInfo.ts = reqresp.ts;
this.pageInfo.tsStatus = responseStatusCode!;
this.mainFrameId = params.frameId;
}

reqresp.fillFetchRequestPaused(params);
Expand Down Expand Up @@ -842,6 +849,22 @@ export class Recorder {
void this.fetcherQ.add(() => fetcher.load());
}

addExternalFetch(url: string, cdp: CDPSession) {
const reqresp = new RequestResponseInfo("0");
reqresp.url = url;
reqresp.method = "GET";
reqresp.frameId = this.mainFrameId || undefined;
const fetcher = new NetworkLoadStreamAsyncFetcher({
reqresp,
recorder: this,
cdp,
networkId: "0",
});
void this.fetcherQ.add(() => fetcher.load());
// return true if successful
return true;
}

startPage({ pageid, url }: { pageid: string; url: string }) {
this.pageid = pageid;
this.pageUrl = url;
Expand All @@ -864,6 +887,7 @@ export class Recorder {
counts: { jsErrors: 0 },
tsStatus: 999,
};
this.mainFrameId = null;
}

addPageRecord(reqresp: RequestResponseInfo) {
Expand Down Expand Up @@ -1485,14 +1509,16 @@ class AsyncFetcher {
logger.warn(
"Async fetch: possible response size mismatch",
{
type: this.constructor.name,
size: reqresp.readSize,
expected: reqresp.expectedSize,
url,
...logDetails,
},
"recorder",
);
if (status === 206) {
if (status === 206 || status === 200) {
void serializer.externalBuffer?.purge();
await crawlState.removeDupe(ASYNC_FETCH_DUPE_KEY, url, status);
return "notfetched";
}
Expand Down
7 changes: 7 additions & 0 deletions src/util/reqresp.ts
Original file line number Diff line number Diff line change
Expand Up @@ -335,6 +335,13 @@ export class RequestResponseInfo {
return this.fromCache && !this.payload;
}

deleteRange() {
if (this.requestHeaders) {
delete this.requestHeaders["range"];
delete this.requestHeaders["Range"];
}
}

shouldSkipSave() {
// skip cached, OPTIONS/HEAD responses, and 304 responses
if (
Expand Down
2 changes: 2 additions & 0 deletions src/util/worker.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ export type WorkerOpts = {
directFetchCapture:
| ((request: DirectFetchRequest) => Promise<DirectFetchResponse>)
| null;
recorder: Recorder | null;
markPageUsed: () => void;
frameIdToExecId: Map<string, number>;
isAuthSet?: boolean;
Expand Down Expand Up @@ -183,6 +184,7 @@ export class PageWorker {
cdp,
workerid,
callbacks: this.callbacks,
recorder: this.recorder,
directFetchCapture,
frameIdToExecId: new Map<string, number>(),
markPageUsed: () => {
Expand Down
16 changes: 8 additions & 8 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -1435,10 +1435,10 @@ browserslist@^4.24.0:
node-releases "^2.0.18"
update-browserslist-db "^1.1.1"

browsertrix-behaviors@^0.6.5:
version "0.6.5"
resolved "https://registry.yarnpkg.com/browsertrix-behaviors/-/browsertrix-behaviors-0.6.5.tgz#a8e3da231caff8e54e34cac6ed3ff431c68b664d"
integrity sha512-URUMUPdU0O2J8rmgzrzY4BzT8vv/iYNQUf/B1Eif3ntMsMC51R4/MGgOC8d7pUDCfy5tnOkV1FGlhB9A5LLQrw==
browsertrix-behaviors@^0.6.6:
version "0.6.6"
resolved "https://registry.yarnpkg.com/browsertrix-behaviors/-/browsertrix-behaviors-0.6.6.tgz#10bcccfb091c051f5c886d5f69487e6d184078de"
integrity sha512-UPNcU9dV0nAvUwJHKKYCkuqdYdlMjK7AWYDyr4xBpSq55xmEh2wQlwQyDyJuUUUrhJNII4NqXK24hVXPdvf5VA==
dependencies:
query-selector-shadow-dom "^1.0.1"

Expand Down Expand Up @@ -5006,10 +5006,10 @@ walker@^1.0.8:
dependencies:
makeerror "1.0.12"

warcio@^2.4.0, warcio@^2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/warcio/-/warcio-2.4.2.tgz#782d8dcb0769f271b0ae96521fb4969e2570e9b3"
integrity sha512-QYbZ3EGYtnAIrzL7Bajo7ak87pipilpkIfaFIzFQWUX4wuXNuKqnfQy/EAoi2tEIl3VJgsWcL+wjjk4+15MKbQ==
warcio@^2.4.0, warcio@^2.4.2, warcio@^2.4.3:
version "2.4.3"
resolved "https://registry.yarnpkg.com/warcio/-/warcio-2.4.3.tgz#37ff95c2358d0d5ddb16e924fe200c4774b3903d"
integrity sha512-c397HNfLE7yJsyVF3XKXB+Yh3q3WKljhdYRPkKF9eyZMtB+HIxj1aBqgq0nTYz492KMKtzygBo0Gx+Gi0fJ9dg==
dependencies:
"@types/pako" "^1.0.7"
"@types/stream-buffers" "^3.0.7"
Expand Down

0 comments on commit d923e11

Please sign in to comment.