@@ -11,7 +11,11 @@ Gzip, Deflate/Inflate, and Brotli.
 
 To access it:
 
-```js
+```mjs
+import zlib from 'node:zlib';
+```
+
+```cjs
 const zlib = require('node:zlib');
 ```
 
@@ -21,13 +25,35 @@ Compressing or decompressing a stream (such as a file) can be accomplished by
 piping the source stream through a `zlib` `Transform` stream into a destination
 stream:
 
-```js
-const { createGzip } = require('node:zlib');
-const { pipeline } = require('node:stream');
+```mjs
+import {
+  createReadStream,
+  createWriteStream,
+} from 'node:fs';
+import process from 'node:process';
+import { createGzip } from 'node:zlib';
+import { pipeline } from 'node:stream';
+
+const gzip = createGzip();
+const source = createReadStream('input.txt');
+const destination = createWriteStream('input.txt.gz');
+
+pipeline(source, gzip, destination, (err) => {
+  if (err) {
+    console.error('An error occurred:', err);
+    process.exitCode = 1;
+  }
+});
+```
+
+```cjs
 const {
   createReadStream,
   createWriteStream,
 } = require('node:fs');
+const process = require('node:process');
+const { createGzip } = require('node:zlib');
+const { pipeline } = require('node:stream');
 
 const gzip = createGzip();
 const source = createReadStream('input.txt');
@@ -39,17 +65,43 @@ pipeline(source, gzip, destination, (err) => {
     process.exitCode = 1;
   }
 });
+```
 
-// Or, Promisified
+Or, using the promise `pipeline` API:
 
-const { promisify } = require('node:util');
-const pipe = promisify(pipeline);
+```mjs
+import {
+  createReadStream,
+  createWriteStream,
+} from 'node:fs';
+import process from 'node:process';
+import { createGzip } from 'node:zlib';
+import { pipeline } from 'node:stream/promises';
+
+async function do_gzip(input, output) {
+  const gzip = createGzip();
+  const source = createReadStream(input);
+  const destination = createWriteStream(output);
+  await pipeline(source, gzip, destination);
+}
+
+await do_gzip('input.txt', 'input.txt.gz');
+```
+
+```cjs
+const {
+  createReadStream,
+  createWriteStream,
+} = require('node:fs');
+const process = require('node:process');
+const { createGzip } = require('node:zlib');
+const { pipeline } = require('node:stream/promises');
 
 async function do_gzip(input, output) {
   const gzip = createGzip();
   const source = createReadStream(input);
   const destination = createWriteStream(output);
-  await pipe(source, gzip, destination);
+  await pipeline(source, gzip, destination);
 }
 
 do_gzip('input.txt', 'input.txt.gz')
@@ -61,7 +113,39 @@ do_gzip('input.txt', 'input.txt.gz')
 
 It is also possible to compress or decompress data in a single step:
 
-```js
+```mjs
+import process from 'node:process';
+import { Buffer } from 'node:buffer';
+import { deflate, unzip } from 'node:zlib';
+
+const input = '.................................';
+deflate(input, (err, buffer) => {
+  if (err) {
+    console.error('An error occurred:', err);
+    process.exitCode = 1;
+  }
+  console.log(buffer.toString('base64'));
+});
+
+const buffer = Buffer.from('eJzT0yMAAGTvBe8=', 'base64');
+unzip(buffer, (err, buffer) => {
+  if (err) {
+    console.error('An error occurred:', err);
+    process.exitCode = 1;
+  }
+  console.log(buffer.toString());
+});
+
+// Or, Promisified
+
+import { promisify } from 'node:util';
+const do_unzip = promisify(unzip);
+
+const unzippedBuffer = await do_unzip(buffer);
+console.log(unzippedBuffer.toString());
+```
+
+```cjs
 const { deflate, unzip } = require('node:zlib');
 
 const input = '.................................';
@@ -104,7 +188,19 @@ limitations in some applications.
 Creating and using a large number of zlib objects simultaneously can cause
 significant memory fragmentation.
 
-```js
+```mjs
+import zlib from 'node:zlib';
+import { Buffer } from 'node:buffer';
+
+const payload = Buffer.from('This is some data');
+
+// WARNING: DO NOT DO THIS!
+for (let i = 0; i < 30000; ++i) {
+  zlib.deflate(payload, (err, buffer) => {});
+}
+```
+
+```cjs
 const zlib = require('node:zlib');
 
 const payload = Buffer.from('This is some data');
@@ -138,7 +234,47 @@ Using `zlib` encoding can be expensive, and the results ought to be cached.
 See [Memory usage tuning][] for more information on the speed/memory/compression
 tradeoffs involved in `zlib` usage.
 
-```js
+```mjs
+// Client request example
+import fs from 'node:fs';
+import zlib from 'node:zlib';
+import http from 'node:http';
+import process from 'node:process';
+import { pipeline } from 'node:stream';
+
+const request = http.get({ host: 'example.com',
+                           path: '/',
+                           port: 80,
+                           headers: { 'Accept-Encoding': 'br,gzip,deflate' } });
+request.on('response', (response) => {
+  const output = fs.createWriteStream('example.com_index.html');
+
+  const onError = (err) => {
+    if (err) {
+      console.error('An error occurred:', err);
+      process.exitCode = 1;
+    }
+  };
+
+  switch (response.headers['content-encoding']) {
+    case 'br':
+      pipeline(response, zlib.createBrotliDecompress(), output, onError);
+      break;
+    // Or, just use zlib.createUnzip() to handle both of the following cases:
+    case 'gzip':
+      pipeline(response, zlib.createGunzip(), output, onError);
+      break;
+    case 'deflate':
+      pipeline(response, zlib.createInflate(), output, onError);
+      break;
+    default:
+      pipeline(response, output, onError);
+      break;
+  }
+});
+```
+
+```cjs
 // Client request example
 const zlib = require('node:zlib');
 const http = require('node:http');
@@ -177,7 +313,52 @@ request.on('response', (response) => {
 });
 ```
 
-```js
+```mjs
+// server example
+// Running a gzip operation on every request is quite expensive.
+// It would be much more efficient to cache the compressed buffer.
+import zlib from 'node:zlib';
+import http from 'node:http';
+import fs from 'node:fs';
+import { pipeline } from 'node:stream';
+
+http.createServer((request, response) => {
+  const raw = fs.createReadStream('index.html');
+  // Store both a compressed and an uncompressed version of the resource.
+  response.setHeader('Vary', 'Accept-Encoding');
+  const acceptEncoding = request.headers['accept-encoding'] || '';
+
+  const onError = (err) => {
+    if (err) {
+      // If an error occurs, there's not much we can do because
+      // the server has already sent the 200 response code and
+      // some amount of data has already been sent to the client.
+      // The best we can do is terminate the response immediately
+      // and log the error.
+      response.end();
+      console.error('An error occurred:', err);
+    }
+  };
+
+  // Note: This is not a conformant accept-encoding parser.
+  // See https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.3
+  if (/\bdeflate\b/.test(acceptEncoding)) {
+    response.writeHead(200, { 'Content-Encoding': 'deflate' });
+    pipeline(raw, zlib.createDeflate(), response, onError);
+  } else if (/\bgzip\b/.test(acceptEncoding)) {
+    response.writeHead(200, { 'Content-Encoding': 'gzip' });
+    pipeline(raw, zlib.createGzip(), response, onError);
+  } else if (/\bbr\b/.test(acceptEncoding)) {
+    response.writeHead(200, { 'Content-Encoding': 'br' });
+    pipeline(raw, zlib.createBrotliCompress(), response, onError);
+  } else {
+    response.writeHead(200, {});
+    pipeline(raw, response, onError);
+  }
+}).listen(1337);
+```
+
+```cjs
 // server example
 // Running a gzip operation on every request is quite expensive.
 // It would be much more efficient to cache the compressed buffer.
@@ -315,7 +496,43 @@ quality, but can be useful when data needs to be available as soon as possible.
 In the following example, `flush()` is used to write a compressed partial
 HTTP response to the client:
 
-```js
+```mjs
+import zlib from 'node:zlib';
+import http from 'node:http';
+import { pipeline } from 'node:stream';
+
+http.createServer((request, response) => {
+  // For the sake of simplicity, the Accept-Encoding checks are omitted.
+  response.writeHead(200, { 'content-encoding': 'gzip' });
+  const output = zlib.createGzip();
+  let i;
+
+  pipeline(output, response, (err) => {
+    if (err) {
+      // If an error occurs, there's not much we can do because
+      // the server has already sent the 200 response code and
+      // some amount of data has already been sent to the client.
+      // The best we can do is terminate the response immediately
+      // and log the error.
+      clearInterval(i);
+      response.end();
+      console.error('An error occurred:', err);
+    }
+  });
+
+  i = setInterval(() => {
+    output.write(`The current time is ${Date()}\n`, () => {
+      // The data has been passed to zlib, but the compression algorithm may
+      // have decided to buffer the data for more efficient compression.
+      // Calling .flush() will make the data available as soon as the client
+      // is ready to receive it.
+      output.flush();
+    });
+  }, 1000);
+}).listen(1337);
+```
+
+```cjs
 const zlib = require('node:zlib');
 const http = require('node:http');
 const { pipeline } = require('node:stream');