Skip to content

Problems with cooperation between http and pipeline/"for await" #38262

Closed
@misos1

Description

@misos1

Is your feature request related to a problem? Please describe.

The problem is that "for await" and pipeline destroy the source stream on error ("for await" also on break), but destroying the http request removes the possibility of sending a meaningful response to the client.

Example with "for await" (body length limiting):

// Repro 1: limit the request body length by iterating `req` directly with "for await".
let http = require("http");

let server = http.createServer(async function(req, res)
{
	try
	{
		let len = 0;
		for await (let chunk of req)
		{
			len += chunk.length;
			// Throwing out of the "for await" loop makes the async iterator
			// destroy `req` — and with it the underlying socket.
			if(len > 2) throw "payload too large";
		}
		res.end("ok");
	}
	catch(err)
	{
		console.log("server log:", err);
		// Because the socket was already destroyed above, this response never
		// reaches the client — the output below shows ECONNRESET on the client side.
		res.end(err);
	}
});

(async function()
{
	// Client: POST 3 bytes ("abc") — over the 2-byte limit — and try to read
	// the server's error response body.
	await new Promise(resolve => server.listen(8888, resolve));
	let req = http.request({ port: 8888, method: "post" });
	req.end("abc");
	try
	{
		let res = await new Promise((resolve, reject) => req.on("response", resolve).on("error", reject));
		let str = "";
		for await (let chunk of res) str += chunk;
		console.log("client received:", str);
	}
	catch(err)
	{
		console.log("client got unexpected error:", err);
	}
	server.close();
}());

Possible output:

server log: payload too large
client got unexpected error: Error: socket hang up
    at connResetException (node:internal/errors:642:14)
    at Socket.socketOnEnd (node:_http_client:486:23)
    at Socket.emit (node:events:381:22)
    at endReadableNT (node:internal/streams/readable:1307:12)
    at processTicksAndRejections (node:internal/process/task_queues:81:21) {
  code: 'ECONNRESET'
}

A possible solution is to use pipe instead of pipeline. But this requires additional handling, such as calling req.unpipe and req.resume, to avoid problems — for example:

// Repro 2 (workaround): iterate a piped PassThrough instead of `req`, so that
// breaking out of "for await" destroys only the PassThrough, not the request socket.
let http = require("http");
let { PassThrough } = require("stream");

let server = http.createServer(async function(req, res)
{
	try
	{
		let len = 0;
		// The PassThrough absorbs the destroy triggered by the thrown error.
		for await (let chunk of req.pipe(new PassThrough()))
		{
			len += chunk.length;
			if(len > 2) throw "payload too large";
		}
		res.end("ok");
	}
	catch(err)
	{
		console.log("server log:", err);
		// Detach the (now destroyed) PassThrough and drain the remaining request
		// body; only then send the error response, once the body has fully ended.
		req.unpipe();
		req.resume();
		req.on("end", () => res.end(err));
	}
});

(async function()
{
	// Same client as repro 1: POST "abc" and read back the error response.
	await new Promise(resolve => server.listen(8888, resolve));
	let req = http.request({ port: 8888, method: "post" });
	req.end("abc");
	try
	{
		let res = await new Promise((resolve, reject) => req.on("response", resolve).on("error", reject));
		let str = "";
		for await (let chunk of res) str += chunk;
		console.log("client received:", str);
	}
	catch(err)
	{
		console.log("client got unexpected error:", err);
	}
	server.close();
}());

This neglected version of the workaround, with the unpipe and resume calls forgotten, sometimes causes EPIPE, or stops after the first iteration when using a keepalive Agent:

// Repro 3: the PassThrough workaround WITHOUT the unpipe/resume/end handling.
// Run in a loop with a large body; the output below shows an intermittent EPIPE.
let http = require("http");
let { PassThrough } = require("stream");

let server = http.createServer(async function(req, res)
{
	try
	{
		let len = 0;
		for await (let chunk of req.pipe(new PassThrough()))
		{
			len += chunk.length;
			if(len > 2) throw "payload too large";
		}
		res.end("ok");
	}
	catch(err)
	{
		// Responding while the request body is still in flight — the missing
		// req.unpipe()/req.resume() drain is what makes this fail intermittently.
		res.end(err);
	}
});

(async function()
{
	// Client: POST 1 MB bodies in an endless loop to provoke the race.
	await new Promise(resolve => server.listen(8888, resolve));
	let agent = new http.Agent({ keepAlive: true });
	let data = Buffer.alloc(1000000);
	for(let i = 0; ; i++)
	{
		console.log(i);
		// Uncomment `, agent` to see the keepalive variant stop after one iteration.
		let req = http.request({ port: 8888, method: "post"/*, agent*/ }).end(data);
		let res = await new Promise((resolve, reject) => req.on("response", resolve).on("error", reject));
		let str = "";
		for await (let chunk of res) str += chunk;
		console.log(str);
	}
}());
2572
payload too large
2573
node:internal/process/promises:245
          triggerUncaughtException(err, true /* fromPromise */);
          ^

Error: write EPIPE
    at WriteWrap.onWriteComplete [as oncomplete] (node:internal/stream_base_commons:94:16) {
  errno: -32,
  code: 'EPIPE',
  syscall: 'write'
}

Describe the solution you'd like

Pipe is too leak-prone for a language built on GC. But the fixed and more convenient pipeline and "for await" have unexpected consequences for http streams.

It would be great to have an option for the pipeline function to not destroy streams on error, but instead drain them to the end (so one can send a response when the pipeline ends). Alternatively, the http request could have an option so that destroy merely drains the stream to the end (and only then actually sends the response) instead of causing EPIPE or similar faults on the client side.

Metadata

Metadata

Assignees

No one assigned

    Labels

    confirmed-bug — Issues with confirmed bugs. http — Issues or PRs related to the http subsystem. promises — Issues and PRs related to ECMAScript promises. stream — Issues and PRs related to the stream subsystem.

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions