|
1 | 1 | // skeleton function for customized downloading and extracting of package information
|
2 |
| -const request = require('request'); |
| 2 | +const http = require('http'); |
| 3 | +const https = require('https'); |
3 | 4 | const fs = require('fs');
|
4 | 5 | const UUID = require('uuid');
|
5 | 6 | const AWS = require('aws-sdk');
|
6 | 7 | const StreamZip = require('node-stream-zip');
|
| 8 | +// assumes the bucket already exists. make sure it is set up to allow writing objects to it from remote sources! |
7 | 9 | const BUCKET_NAME = process.env.BUCKET_NAME;
|
8 | 10 |
|
9 | 11 | if (process.env.AWS_REGION !== undefined && BUCKET_NAME !== undefined) {
|
@@ -32,52 +34,42 @@ exports.handleBundle = function (package_url, cb) {
|
32 | 34 | let bucketUrl = '';
|
33 | 35 | const TMP_FILE_NAME = `${UUID.v4()}.zip`;
|
34 | 36 |
|
35 |
| - // create a new bucket if it doesn't already exist |
36 |
| - new AWS.S3().createBucket({Bucket: BUCKET_NAME, ACL: 'public-read'}, err => { |
37 |
| - |
38 |
| - // OperationAborted errors are expected, as we are potentially |
39 |
| - // calling this API multiple times simultaneously |
40 |
| - if (err && err.code !== 'OperationAborted') { |
41 |
| - console.log(err); |
42 |
| - return cb(err); |
43 |
| - } |
44 |
| - // read the URL and save it to a buffer variable |
45 |
| - readUrlToBuffer(package_url) |
46 |
| - .then(zipBuffer => { // submit the file contents to S3 |
47 |
| - compressedSize = zipBuffer.length; |
48 |
| - const randomString = UUID.v4(); |
49 |
| - const fileName = `${randomString}.zip`; |
50 |
| - bucketUrl = `https://${BUCKET_NAME}.s3.amazonaws.com/${fileName}`; |
51 |
| - // make the bundle publicly accessible |
52 |
| - const objectParams = {Bucket: BUCKET_NAME, ACL: 'public-read', Key: fileName, Body: zipBuffer}; |
53 |
| - // Create object upload promise |
54 |
| - return new AWS.S3().putObject(objectParams).promise(); |
55 |
| - }) |
56 |
| - .then(() => { // unzip the contents of the bundle to get its uncompressed data information |
57 |
| - return streamUrlToTmpFile(bucketUrl, TMP_FILE_NAME); |
58 |
| - }) |
59 |
| - .then(() => { |
60 |
| - return unzipAndGetUncompressedSize(TMP_FILE_NAME); |
61 |
| - }) |
62 |
| - .then(uncompressedSize => { |
63 |
| - // delete the tmp zip file |
64 |
| - fs.unlink(TMP_FILE_NAME, () => { |
65 |
| - // all the information has been collected |
66 |
| - cb(null, { |
67 |
| - url: bucketUrl, |
68 |
| - size_compressed_bytes: compressedSize, |
69 |
| - size_decompressed_bytes: uncompressedSize |
70 |
| - }); |
71 |
| - }); |
72 |
| - }) |
73 |
| - .catch(err => { |
74 |
| - console.log(err); |
75 |
| - // delete the tmp zip file |
76 |
| - fs.unlink(TMP_FILE_NAME, () => { |
77 |
| - cb(err); |
| 37 | + // read the URL and save it to a buffer variable |
| 38 | + readUrlToBuffer(package_url) |
| 39 | + .then(zipBuffer => { // submit the file contents to S3 |
| 40 | + compressedSize = zipBuffer.length; |
| 41 | + const randomString = UUID.v4(); |
| 42 | + const fileName = `${randomString}.zip`; |
| 43 | + bucketUrl = `https://${BUCKET_NAME}.s3.amazonaws.com/${fileName}`; |
| 44 | + // make the bundle publicly accessible |
| 45 | + const objectParams = {Bucket: BUCKET_NAME, ACL: 'public-read', Key: fileName, Body: zipBuffer}; |
| 46 | + // Create object upload promise |
| 47 | + return new AWS.S3().putObject(objectParams).promise(); |
| 48 | + }) |
| 49 | + .then(() => { // unzip the contents of the bundle to get its uncompressed data information |
| 50 | + return streamUrlToTmpFile(bucketUrl, TMP_FILE_NAME); |
| 51 | + }) |
| 52 | + .then(() => { |
| 53 | + return unzipAndGetUncompressedSize(TMP_FILE_NAME); |
| 54 | + }) |
| 55 | + .then(uncompressedSize => { |
| 56 | + // delete the tmp zip file |
| 57 | + fs.unlink(TMP_FILE_NAME, () => { |
| 58 | + // all the information has been collected |
| 59 | + cb(null, { |
| 60 | + url: bucketUrl, |
| 61 | + size_compressed_bytes: compressedSize, |
| 62 | + size_decompressed_bytes: uncompressedSize |
78 | 63 | });
|
79 | 64 | });
|
80 |
| - }); |
| 65 | + }) |
| 66 | + .catch(err => { |
| 67 | + console.log(err); |
| 68 | + // delete the tmp zip file |
| 69 | + fs.unlink(TMP_FILE_NAME, () => { |
| 70 | + cb(err); |
| 71 | + }); |
| 72 | + }); |
81 | 73 | }
|
82 | 74 |
|
83 | 75 | function unzipAndGetUncompressedSize (fileName) {
|
@@ -109,24 +101,37 @@ function unzipAndGetUncompressedSize (fileName) {
|
109 | 101 | }
|
110 | 102 |
|
/**
 * Downloads the resource at `url` (http or https) and streams it into a
 * local file, without buffering the whole body in memory.
 *
 * @param {string} url - fully qualified http:// or https:// URL
 * @param {string} fileName - path of the local file to create/overwrite
 * @returns {Promise<void>} resolves once the file is fully written,
 *   rejects on request or filesystem errors
 */
function streamUrlToTmpFile (url, fileName) {
  // pick the right client module for the scheme; new URL() throws on
  // malformed input, which is the desired fail-fast behavior
  const client = new URL(url).protocol === 'https:' ? https : http;
  return new Promise((resolve, reject) => {
    // NOTE: http.get/https.get call req.end() internally — do NOT call
    // .end() again on the returned request (raises ERR_STREAM_ALREADY_FINISHED
    // warnings on modern Node)
    const req = client.get(url, res => {
      res.pipe(fs.createWriteStream(fileName))
        .on('close', resolve)
        .on('error', reject); // e.g. disk full, permission denied
    });
    // without this the promise would hang forever on DNS failure,
    // connection refused, etc.
    req.on('error', reject);
  });
}
|
118 | 116 |
|
/**
 * Downloads the resource at `url` (http or https) and accumulates the
 * whole response body into a single Buffer.
 *
 * @param {string} url - fully qualified http:// or https:// URL
 * @returns {Promise<Buffer>} resolves with the complete body,
 *   rejects on request or response stream errors
 */
function readUrlToBuffer (url) {
  // pick the right client module for the scheme; new URL() throws on
  // malformed input, which is the desired fail-fast behavior
  const client = new URL(url).protocol === 'https:' ? https : http;
  return new Promise((resolve, reject) => {
    const chunks = [];
    // NOTE: http.get/https.get call req.end() internally — do NOT call
    // .end() again on the returned request (raises ERR_STREAM_ALREADY_FINISHED
    // warnings on modern Node)
    const req = client.get(url, res => {
      res.on('data', chunk => chunks.push(chunk));
      // 'end' fires exactly when the body has been fully received;
      // 'close' also fires after premature aborts and could resolve
      // with a truncated buffer
      res.on('end', () => resolve(Buffer.concat(chunks)));
      res.on('error', reject);
    });
    // without this the promise would hang forever on DNS failure,
    // connection refused, etc.
    req.on('error', reject);
  });
}
|
0 commit comments