stream: pipeline should error if any stream is destroyed #36674
Labels
Comments
|
I would like to work on this problem |
|
I also bumped into this. Http client can easily cause this to http server which is using pipeline (this could cause really bad things): let { PassThrough, pipeline } = require("stream");
// Reproduction: the client destroys its request after 500 ms, while the server
// waits 1000 ms before wiring up pipeline() -- so pipeline() receives an
// already-destroyed Readable. The issue asks that this fail fast with
// ERR_STREAM_DESTROYED instead of misbehaving.
let http = require("http");
let server = http.createServer(async function(req, res)
{
// Delay long enough that the client has already called req.destroy()
// (see the 500 ms timeout below) before the body is consumed.
await new Promise(r => setTimeout(r, 1000));
console.log("request destroyed", req.destroyed); // expected: true by this point
let pass = new PassThrough();
// pipeline() is handed a destroyed source here; its callback logs how it ends.
pipeline(req, pass, e => console.log("pipeline finished", e));
// Drain the PassThrough; whether this loop ever completes depends on how
// pipeline() handles the destroyed source -- that is the bug being reported.
for await (let chunk of pass) console.log("received", chunk.length);
console.log("body processed");
res.end();
});
(async function()
{
await new Promise(resolve => server.listen(resolve));
// Start a POST request, write a partial body, and never end it.
let req = http.request({ port: server.address().port, method: "post" });
req.on("error", () => null); // swallow the expected client-side error
req.write(Buffer.alloc(10000));
// Destroy the client request 500 ms in -- before the server's 1000 ms delay elapses.
setTimeout(() => req.destroy(), 500);
}()); |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
`pipeline` should immediately fail with `ERR_STREAM_DESTROYED` when any of the streams have already been destroyed. `Readable` might need a little extra consideration since it's possible to read the data after being destroyed. Should maybe check `_readableState.errored` and/or `_readableState.ended`. Refs: #29227 (comment)
The text was updated successfully, but these errors were encountered: