-
Notifications
You must be signed in to change notification settings - Fork 0
/
generate-async-iteration.js
61 lines (47 loc) · 1.48 KB
/
generate-async-iteration.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
const fs = require('fs');
const path = require('path');
const { once } = require('events');
const util = require('util');
const stream = require('stream');
const finished = util.promisify(stream.finished);
// This stream will take only one part of file.
// With start=0 and end=64KiB-1 matching the 64KiB highWaterMark, the read
// stream yields (at most) a single 64KiB chunk of the source CSV.
const readStream = fs.createReadStream(path.join(__dirname, '../assets/test.csv'), {
highWaterMark: 64 * 1024, // chunk size
start: 0,
end: 64 * 1024 - 1,
});
// Destination stream for the generated (much larger) CSV file.
const writeStream = fs.createWriteStream(path.join(__dirname, '../assets/big-test.csv'));
/**
 * Writes `data` to the module-level `writeStream` `repeatCount` times,
 * honoring backpressure, then ends the stream and waits until it is
 * fully flushed.
 *
 * @param {string|Buffer} data - chunk to write repeatedly
 * @param {number} repeatCount - number of times to write `data`
 * @returns {Promise<void>} resolves once the write stream has finished
 */
const repeatedWriteChunk = async (data, repeatCount) => {
// BUG FIX: original loop used `i = +1` (assignment of +1), which pinned
// `i` at 1 forever and made the loop infinite for any repeatCount > 1.
for (let i = 0; i < repeatCount; i += 1) {
if (!writeStream.write(data)) {
// Internal buffer is full — wait for it to drain before writing more.
// https://nodejs.org/api/stream.html#stream_event_drain
await once(writeStream, 'drain');
}
}
// NOTE(review): this ends the shared writeStream, so it can only run once
// per process — acceptable here because the read stream is configured to
// yield a single chunk. Confirm if the read range is ever widened.
writeStream.end();
await finished(writeStream);
};
/**
 * Turns one raw CSV chunk into repeated output: writes the header line
 * exactly once, drops the (possibly truncated) trailing line, and hands
 * the remaining data lines to repeatedWriteChunk for duplication.
 *
 * @param {Buffer} chunk - raw bytes read from the source CSV
 * @returns {Promise<void>}
 */
const writeChunk = async (chunk) => {
const lines = chunk.toString('utf8').trim().split('\r\n');
// First line holds the CSV field names; emit it a single time.
const header = lines.shift();
writeStream.write(`${header}\r\n`);
// Last line can be part of the next chunk first line, so remove it.
lines.pop();
await repeatedWriteChunk(lines.join('\r\n'), 170000);
};
/**
 * Drives the generation: consumes the readable stream chunk by chunk,
 * forwarding each chunk to writeChunk. Any error raised during iteration
 * or writing is caught and logged rather than rethrown.
 *
 * @param {import('stream').Readable} readable - source stream of CSV chunks
 * @returns {Promise<void>}
 */
const runGenerating = async (readable) => {
try {
console.log('Big csv file is generating...');
for await (const piece of readable) {
await writeChunk(piece);
}
console.log('Big csv file has generated.');
} catch (err) {
// Best-effort script: surface the error on stdout and exit normally.
console.log(err);
}
};
runGenerating(readStream);