
Commit de2ba17

Author: hoailinh1210
Message: refactor db
Parent: 21b51a7

File tree

7 files changed: +82 additions, -56 deletions

.gitignore
.vscode/settings.json (deleted)
bucketS3Util.js (new)
configAWS.js (new)
dynamoDBCSV.js
index.js
package-lock.json


.gitignore

Lines changed: 2 additions & 1 deletion
@@ -1,3 +1,4 @@
 node_modules
 test.js
-config.json
+config.json
+.vscode

.vscode/settings.json

Lines changed: 0 additions & 3 deletions
This file was deleted.

bucketS3Util.js

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+const aws = require('aws-sdk');
+const fs = require('fs');
+const path = require('path');
+
+module.exports = {
+  list() {
+    const s3 = new aws.S3();
+    return s3.listBuckets().promise();
+  },
+  create(bucketName) {
+    const s3 = new aws.S3();
+    const bucketParams = {
+      Bucket: bucketName,
+    };
+    return s3.createBucket(bucketParams).promise();
+  },
+  uploadFile(bucketName, fileName) {
+    const s3 = new aws.S3();
+    const uploadParams = { Bucket: bucketName, Key: '', Body: '' };
+    const fileStream = fs.createReadStream(fileName);
+    fileStream.on('error', (err) => {
+      console.log('File Error', err);
+    });
+    uploadParams.Body = fileStream;
+    uploadParams.Key = path.basename(fileName);
+    return s3.upload(uploadParams).promise();
+  },
+};
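
A minimal usage sketch for the new S3 helper, assuming credentials have already been set through configAWS; the bucket name and file path below are placeholders:

const bucket = require('./bucketS3Util');

bucket.create('my-example-bucket')                               // placeholder bucket name
  .then(() => bucket.uploadFile('my-example-bucket', './data.csv'))
  .then(result => console.log('Uploaded to', result.Location))   // Location is part of s3.upload()'s response
  .catch(err => console.error(err));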

configAWS.js

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+const aws = require('aws-sdk');
+
+module.exports = {
+  configByFile(pathConfig) {
+    aws.config.loadFromPath(pathConfig);
+  },
+  config(accessKeyId, secretAccessKey, region) {
+    aws.config.update({ accessKeyId, secretAccessKey, region });
+  },
+};
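
A short sketch of how a consumer might initialize credentials with this module before calling the DynamoDB or S3 helpers; the file path and key values are placeholders:

const config = require('./configAWS');

// Either load a JSON credentials file (aws.config.loadFromPath)...
config.configByFile('./aws-config.json');

// ...or pass values directly (aws.config.update):
config.config('AKIAEXAMPLE', 'secretExample', 'us-east-1');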

dynamoDBCSV.js

Lines changed: 28 additions & 47 deletions
@@ -4,8 +4,6 @@ const aws = require('aws-sdk');
 const path = require('path');
 const csvToJson = require('csvtojson');
 
-let dynamoDB;
-
 function generateDataDynamoDB(tableName, data) {
   const params = {
     RequestItems: {
@@ -24,15 +22,9 @@ function generateDataDynamoDB(tableName, data) {
 }
 
 function bulkData(tableName, data) {
+  const dynamoDB = new aws.DynamoDB();
   const params = generateDataDynamoDB(tableName, data);
-  console.log(params);
-  dynamoDB.batchWriteItem(params, (err) => {
-    if (err) {
-      throw err;
-    } else {
-      console.log('Imported');
-    }
-  });
+  return dynamoDB.batchWriteItem(params).promise();
 }
 
 function createFile(locaFile, csv) {
@@ -44,49 +36,38 @@ function createFile(locaFile, csv) {
       fs.mkdirSync(path.dirname(locationFile));
       fs.writeFileSync(locationFile, csv);
     }
+    return Promise.resolve(true);
   } catch (err) {
-    throw err;
+    return Promise.reject(err);
   }
 }
 
-module.exports = {
-  configByFile(pathConfig) {
-    aws.config.loadFromPath(pathConfig);
-    dynamoDB = new aws.DynamoDB();
-  },
-  config(accessKeyId, secretAccessKey, region) {
-    aws.config.update({ accessKeyId, secretAccessKey, region });
-    dynamoDB = new aws.DynamoDB();
-  },
-  export(tableName, locationFile) {
-    const params = {
-      TableName: tableName,
-    };
-    dynamoDB.scan(params, (error, data) => {
-      if (error) {
-        throw error;
-      } else {
-        const { Items: items } = data;
-        try {
-          const records = [];
-          items.forEach((element) => {
-            records.push(aws.DynamoDB.Converter.unmarshall(element));
-          });
-          const fields = Object.keys(records[0]);
-          const parser = new Json2csvParser({ fields });
-          const csv = parser.parse(records);
-          createFile(locationFile, csv);
-        } catch (err) {
-          throw err;
-        }
-      }
+function csvFileToJson(data) {
+  const { Items: items } = data;
+  try {
+    const records = [];
+    items.forEach((element) => {
+      records.push(aws.DynamoDB.Converter.unmarshall(element));
     });
+    const fields = Object.keys(records[0]);
+    const parser = new Json2csvParser({ fields });
+    const csv = parser.parse(records);
+    return Promise.resolve(csv);
+  } catch (err) {
+    return Promise.reject(err);
+  }
+}
+
+module.exports = {
+  exportCSV(params, locationFile) {
+    const dynamoDB = new aws.DynamoDB();
+    return dynamoDB.scan(params).promise()
+      .then(response => csvFileToJson(response))
+      .then(csv => createFile(locationFile, csv));
   },
-  import(tableName, csvFilePath) {
-    csvToJson()
+  importCSV(tableName, csvFilePath) {
+    return csvToJson()
       .fromFile(csvFilePath)
-      .then((jsonObj) => {
-        bulkData(tableName, jsonObj);
-      });
+      .then(jsonObj => bulkData(tableName, jsonObj));
   },
 };
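
A sketch of calling the promisified API after this refactor; note that exportCSV now takes a full DynamoDB scan params object rather than just a table name. The table name and file paths are placeholders:

const dynamoDBCSV = require('./dynamoDBCSV');

dynamoDBCSV.exportCSV({ TableName: 'Users' }, './out/users.csv')   // scan params object
  .then(() => dynamoDBCSV.importCSV('Users', './out/users.csv'))
  .then(() => console.log('Export/import round trip complete'))
  .catch(err => console.error(err));

Constructing the DynamoDB client inside each call, instead of caching a module-level instance, means the client picks up whatever aws.config holds at call time rather than freezing credentials when the module is first configured.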

index.js

Lines changed: 13 additions & 4 deletions
@@ -1,8 +1,17 @@
 const dynamoDBCSV = require('./dynamoDBCSV');
+const config = require('./configAWS');
+const bucket = require('./bucketS3Util');
 
 module.exports = {
-  config: dynamoDBCSV.config,
-  configByFile: dynamoDBCSV.configByFile,
-  import: dynamoDBCSV.import,
-  export: dynamoDBCSV.export,
+  config: config.config,
+  configByFile: config.configByFile,
+  dynamoDB: {
+    importCSV: dynamoDBCSV.importCSV,
+    exportCSV: dynamoDBCSV.exportCSV,
+  },
+  bucket: {
+    list: bucket.list,
+    uploadFile: bucket.uploadFile,
+    create: bucket.create,
+  },
 };
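
A sketch of the refactored, namespaced entry point from a consumer's perspective; the credentials, region, and output are placeholders:

const awsUtil = require('./index');

awsUtil.config('AKIAEXAMPLE', 'secretExample', 'ap-southeast-1');  // placeholder credentials
awsUtil.bucket.list()
  .then(data => console.log(data.Buckets.map(b => b.Name)))        // listBuckets() response shape
  .catch(err => console.error(err));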

package-lock.json

Lines changed: 1 addition & 1 deletion
package-lock.json is a generated file; its diff is not rendered.
