// utilities.js: helpers for downloading a PDF, compressing it with Ghostscript,
// deleting temporary files, and validating URLs.
const fs = require('fs');
const path = require('path');
const util = require('util');
const fetch = require('node-fetch');
const { spawn } = require('child_process');
const streamPipeline = util.promisify(require('stream').pipeline);
const FileType = require('file-type');
// Download job.data.url to public/<job.id>.pdf, reporting byte progress while streaming.
const download = (job) => {
  const url = job.data.url;
  const dest = path.join(__dirname, `public/${job.id}.pdf`);
  return new Promise((resolve, reject) => {
    fetch(url).then(async (response) => {
      if (response.ok) {
        const file = fs.createWriteStream(dest);
        const totalSize = response.headers.get('content-length');
        // Periodically report bytes written so far against the expected total.
        const timer = setInterval(() => {
          job.reportProgress({ done: parseInt(file.bytesWritten), total: parseInt(totalSize) });
        }, 700);
        console.log('\x1b[46m\x1b[30m%s\x1b[0m', `Download started: ${file.path}`);
        // Wrap the response body so the detected file type can be logged without consuming the stream.
        const fileTypeStream = await FileType.stream(response.body);
        console.log(fileTypeStream.fileType);
        try {
          await streamPipeline(fileTypeStream, file);
          resolve(job.data);
        } catch (err) {
          reject(err.message);
        } finally {
          clearInterval(timer); // stop progress reporting whether the pipeline succeeded or failed
        }
      } else {
        reject(response.statusText);
      }
    }).catch((err) => {
      reject(err.message);
    });
  });
};
// Compress public/<job.id>.pdf with Ghostscript, downsampling images to job.data.dpi (default 120).
// Resolves with the generated output file name and reports per-page progress while Ghostscript runs.
const compress = (job) => {
  return new Promise((resolve, reject) => {
    const filename = `${job.id}.pdf`;
    const outName = `${Date.now().toString(18)}.pdf`;
    const dpi = job.data.dpi || 120;
    const ghostScript = process.env.GSX_OPTIMIZE_COMMAND || 'gs';
    const gsargs = [
      '-sDEVICE=pdfwrite',
      '-dCompatibilityLevel=1.4',
      '-dPDFSETTINGS=/ebook',
      '-dPreserveEPSInfo=false',
      '-dConvertCMYKImagesToRGB=true',
      '-dColorImageDownsampleThreshold=1',
      `-dColorImageResolution=${dpi}`,
      `-dMonoImageResolution=${dpi}`,
      `-dGrayImageResolution=${dpi}`,
      '-dNOPAUSE',
      '-dBATCH',
      '-dPrinted=false',
      `-sOutputFile=./public/${outName}`,
      `./public/${filename}`
    ];
    const optimizer = spawn(ghostScript, gsargs);
    console.log(`Compressing ${filename} --> ${outName} : ${dpi}dpi`);
    let totalPages = 1;
    // Ghostscript prints "Processing pages 1 through N" once, then "Page n" per page;
    // parse those lines to report progress back to the job.
    optimizer.stdout.on('data', (data) => {
      const rx1 = /Processing pages 1 through (\d+)/m;
      const rx2 = /Page (\d+)/;
      const g1 = rx1.exec(data.toString());
      const g2 = rx2.exec(data.toString());
      if (g1) {
        totalPages = parseInt(g1[1]);
      } else if (g2) {
        job.reportProgress({ done: parseInt(g2[1]), total: totalPages });
      }
    });
    optimizer.stderr.on('data', (data) => {
      const eobj = data.toString();
      if (eobj.includes("failed: true")) {
        console.log('stderr: ' + eobj);
        reject("Failed to compress");
      }
    });
    optimizer.on('exit', (code) => {
      console.log('ghostscript exited with code ' + code);
      if (code)
        reject("Conversion error, code " + code);
      else
        resolve(outName);
    });
    optimizer.on('error', (err) => {
      console.log(err);
      reject(err);
    });
  });
};
// Remove a file from disk, logging the deletion; logs and rethrows if the unlink fails.
const deleteFile = async (filename) => {
  try {
    await fs.promises.unlink(filename);
    console.log('\x1b[41m\x1b[30m%s\x1b[0m', `${filename} was deleted`);
  } catch (err) {
    console.log(err);
    throw err;
  }
};
// Loose syntactic check that a string looks like an http(s) URL (hostname or IPv4, optional port/path/query/fragment).
const isValidURL = (str) => {
  const pattern = new RegExp('^(https?:\\/\\/)?' + // protocol
    '((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|' + // domain name
    '((\\d{1,3}\\.){3}\\d{1,3}))' + // OR ip (v4) address
    '(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*' + // port and path
    '(\\?[;&a-z\\d%_.~+=-]*)?' + // query string
    '(\\#[-a-z\\d_]*)?$', 'i'); // fragment locator
  return pattern.test(str);
};
module.exports = {
  download,
  compress,
  deleteFile,
  isValidURL
};
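
// Usage sketch (not part of the original module): one way these helpers could be chained
// inside a job processor. The 'bee-queue' queue, its name, and the error handling shown
// here are assumptions for illustration, not taken from this repository.
/*
const Queue = require('bee-queue');
const path = require('path');
const { download, compress, deleteFile, isValidURL } = require('./utilities');

const pdfQueue = new Queue('pdf-compress');
pdfQueue.process(async (job) => {
  if (!isValidURL(job.data.url)) throw new Error('Invalid URL');
  await download(job);                 // writes public/<job.id>.pdf, reporting byte progress
  const outName = await compress(job); // writes public/<outName>, reporting page progress
  await deleteFile(path.join(__dirname, `public/${job.id}.pdf`)); // drop the original download
  return outName;                      // result stored on the job for later retrieval
});
*/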