Import generic PM/KPI data as csv
erssebaggala committed Oct 17, 2019
1 parent 4eb1c2a commit d7b1713
Showing 8 changed files with 238 additions and 9 deletions.
2 changes: 0 additions & 2 deletions background/background-process.html
@@ -685,12 +685,10 @@ <h1>Background process</h1>

try {
if(task === 'parse_data'){

//@TODO: Move CM parsing to background-utils.js
if(options.dataType === 'CM' && options.vendor !== 'BODASTAGE'){
processCMDumps(options.vendor, options.format, options.inputFolder, options.outputFolder)
}else{

const rtn = await utils.parseData(
options.dataType,
options.vendor,
17 changes: 15 additions & 2 deletions background/background-utils.js
Expand Up @@ -20,6 +20,7 @@ const { VENDOR_CM_FORMATS, VENDOR_PM_FORMATS, VENDOR_FM_FORMATS,
const tems = window.require('./tems');
const csvToExcelCombiner = window.require('./csv-to-excel-combiner');
const EXCEL = window.require('./excel');
const bodaPM = window.require('./boda-pm');

//Fix PATH env variable on Mac OSX
if(process.platform === 'darwin'){
@@ -815,7 +816,7 @@ function parseNokiaPMXML(vendor, format, inputFolder, outputFolder, beforeFilePa
return {status: 'success', message: `${vendor} PM files successfully parsed.`}
}

function parsePMFiles(vendor, format, inputFolder, outputFolder, beforeFileParse, afterFileParse, beforeParse, afterParse){
function parsePMFiles(vendor, format, inputFolder, outputFolder, beforeFileParse, afterFileParse, beforeParse, afterParse, ){

if( vendor === 'ERICSSON' && format === 'MEAS_COLLEC_XML'){
return parseMeasuremenetCollectionXML(vendor, format, inputFolder, outputFolder, beforeFileParse, afterFileParse, beforeParse, afterParse)
@@ -829,6 +830,12 @@ function parsePMFiles(vendor, format, inputFolder, outputFolder, beforeFileParse
return parseNokiaPMXML(vendor, format, inputFolder, outputFolder, beforeFileParse, afterFileParse, beforeParse, afterParse)
}


if(vendor === 'BODASTAGE' && format === 'CSV'){
return {status: "success", message: "No parsing for PM data needed."}
}


return {status: 'error', message: 'PM processing not yet implemented.'}
}

@@ -1360,7 +1367,7 @@ async function loadCSVFiles(table, tableFields, inputFolder, truncateTables, bef
}

async function loadPMData(vendor, format, inputFolder, truncateTables, beforeFileLoad, afterFileLoad, beforeLoad, afterLoad){

if(vendor === 'ERICSSON' && format === 'MEAS_COLLEC_XML'){
return await loadEricssonMeasCollectXML(inputFolder, truncateTables, beforeFileLoad, afterFileLoad, beforeLoad, afterLoad);
}
@@ -1376,6 +1383,12 @@ async function loadPMData(vendor, format, inputFolder, truncateTables, beforeFil
let tableFields = ['filename','start_time','interval','base_id','local_moid','ne_type','measurement_type','counter_id','counter_value']
return loadCSVFiles(table, tableFields, inputFolder, truncateTables, beforeFileLoad, afterFileLoad, beforeLoad, afterLoad)
}

if(vendor === 'BODASTAGE' && format === 'CSV'){
let table = 'pm.kpis';
await bodaPM.loadBodaCSVKPIsDataViaStream(inputFolder, truncateTables, beforeFileLoad, afterFileLoad, beforeLoad, afterLoad);
return {status: "success", message: "Loading PM data completed."}
}

return {status: 'success', message: 'PM functionality is not ready!'}
}
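
With this branch in place, a Boda KPI import needs no parse step; the CSVs in the input folder are streamed straight into pm.kpis. A minimal calling sketch from an async context inside background-utils.js (the folder path and null callbacks are illustrative, not part of this commit):

// Illustrative only: dispatch the new BODASTAGE/CSV path.
const result = await loadPMData(
    'BODASTAGE',           // vendor
    'CSV',                 // format
    '/path/to/kpi/csvs',   // inputFolder (illustrative)
    false,                 // truncateTables
    null, null,            // beforeFileLoad, afterFileLoad
    null, null             // beforeLoad, afterLoad
);
log.info(result.message);  // "Loading PM data completed."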
216 changes: 216 additions & 0 deletions background/boda-pm.js
@@ -0,0 +1,216 @@
const log = window.require('electron-log');
const { Client, Pool } = window.require('pg');
const copyFrom = window.require('pg-copy-streams').from;
const fs = window.require('fs'); //used by readdirSync below
const csv = window.require('csvtojson'); //csv().fromFile(...).subscribe(...) below follows the csvtojson API
const path = window.require('path');
const queryHelper = window.require('./query-helpers');
/**
 * Load Boda KPI CSV files into pm.kpis via COPY FROM STDIN.
 *
 * @param csvFolder string Folder containing the KPI CSV files
 * @param truncateTables boolean Truncate the table before load. Values are true or false
 * @param beforeFileLoad function Called as beforeFileLoad(table, fileName, csvFolder) before each file is loaded
 * @param afterFileLoad function Called as afterFileLoad(table, fileName, csvFolder) after each file is loaded
 * @param beforeLoad function Called once before loading starts
 * @param afterLoad function Called once after all files have been processed
 *
 */
async function loadBodaCSVKPIsDataViaStream(csvFolder, truncateTables, beforeFileLoad, afterFileLoad, beforeLoad, afterLoad){

const dbConDetails = await queryHelper.getSQLiteDBConnectionDetails('boda');

const hostname = dbConDetails.hostname;
const port = dbConDetails.port;
const username = dbConDetails.username;
const password = dbConDetails.password;

const connectionString = `postgresql://${username}:${password}@${hostname}:${port}/boda`;

const pool = new Pool({
connectionString: connectionString,
})

pool.on('error', (err, client) => {
log.error(err.toString());
client.release();
})


if(typeof beforeLoad === 'function'){
beforeLoad();
}

if(truncateTables === true) {
log.info("Truncate tables before loading is set to true.")
}

const items = fs.readdirSync(csvFolder, { withFileTypes: true }).filter(dirent => !dirent.isDirectory()).map(dirent => dirent.name);

//This will be used to wait for the loading to complete before exiting the function
let csvFileCount = items.length;
let filesNotLoaded = 0; //Keep count of files not loaded

//100 mb
const highWaterMark = 100 * 1024 * 1024;

//Time to wait for load to complete
const waitTime = 1; //1 second

//Maximum times to check
const maxLoadWait = 10; // x waitTime

for (let i=0; i< items.length; i++) {
let fileName = items[i];
let filePath = path.join(csvFolder, items[i]);

let table = "pm.kpis";

//Use to wait for each file to load
let fileIsLoading = true;

let client = null;
let copyFromStream = null;
try{
//Get client from pool
client = await pool.connect();
if(client.processID === null){
log.error('Failed to connect to database');
return false;
}

//Truncate
if(truncateTables === true) await client.query(`TRUNCATE ${table} RESTART IDENTITY CASCADE`);


copyFromStream = await client.query(copyFrom(`COPY ${table} (data) FROM STDIN`,{writableHighWaterMark : highWaterMark}));
}catch(e){
if( copyFromStream !== null) copyFromStream.end();
if( client !== null) client.release();

log.error(`Pool_Connect_Query: ${e.toString()}`);
log.info(`Skipping loading of ${fileName}`);

//reduce the file count that needs to be processed
--csvFileCount;
fileIsLoading = false;

//Increment the count of files that have not been processed
++filesNotLoaded;

//Process next file
//@TODO:
continue;
}

copyFromStream.on('error', async (err) => {
log.error(`copyFromStream.errorEvent: ${err.toString()}. [${fileName}]`);

//Reduce load file count
//--csvFileCount;
fileIsLoading = false;

//By setting writeStatus to null, we are letting the next write know that there was an
//error in the previous attempt so we should exit csvToJson
writeStatus = null;
});


//Write stream status used to handle backpressure on the write stream
let writeStatus = true;
copyFromStream.on('drain', (err) => {
log.info(`Write stream drained for ${fileName}`);
writeStatus = true;
});

copyFromStream.on('end', (err) => {
//reduce process file count
--csvFileCount;

log.info(`Loading of ${fileName} is done. ${csvFileCount} csv files remaining to be processed.`);
writeStatus = true;

fileIsLoading = false;

});

if(typeof beforeFileLoad === 'function'){
beforeFileLoad(table, fileName, csvFolder);
}

//log.info(`copyFromStream.writableHighWaterMark: ${copyFromStream.writableHighWaterMark}`);

await new Promise((resolve, reject) => {
try{//@TODO: Test whether this try block is necessary
csv()
.fromFile(filePath)
.subscribe(async (json)=>{
//Serialize the row; the whole row is stored as a JSON string in the table's single "data" column
const jsonString = JSON.stringify(json);

//Escape backslashes in jsonString
//COPY ... FROM STDIN treats a backslash as an escape character, so "\"SubNetwork=ONRM_ROOT_MO_R\"" would be loaded as ""SubNetwork=ONRM_ROOT_MO_R"", which causes an error on insertion.
//Doubling each backslash below preserves it in the jsonString for insertion
const re = new RegExp(String.fromCharCode(92, 92), 'g');
const sanitizedJsonString = jsonString.replace(re, String.fromCharCode(92, 92));

//Get out of subscribe if there was an error
if(writeStatus === null){
return;
}

writeStatus = copyFromStream.write(sanitizedJsonString + "\n");

//Sleep for 1s if status is false, i.e. wait for the writable stream buffer/queue to free
while(writeStatus === false){
log.info(`Write status: ${writeStatus} for ${fileName}. Wait for 1 second for write buffer to clear.`);
await new Promise((rs, rj) => { setTimeout(rs,1000);});
}

},(err) => {//onError
log.error(`csvToJson.onError: ${err.toString()}`);
copyFromStream.end();
reject();
},
()=>{//onComplete
log.info(`End of csvToJson for ${fileName}.`)
copyFromStream.end();
resolve(undefined);
});
}catch(e){
writeStatus = true; // -- to stop while(writeStatus === false) from continuing endlessly
log.error(`Catch:csvToJson Error: ${e.toString()}`);
copyFromStream.end();
fileIsLoading = false;
reject(e)

}
});

//Wait for loading to complete. The csvToJson can complete before the streamWriter is done
await new Promise(async (rs, rj) => {
while(fileIsLoading === true ){
log.info(`Waiting for ${waitTime} seconds for loading of ${fileName} to complete...`);
await new Promise((rs, rj) => { setTimeout(rs, waitTime * 1000); });
}

//Release client i.e. return to pool
await client.release();
rs(undefined);

});

if(typeof afterFileLoad === 'function'){
afterFileLoad(table, fileName, csvFolder);
}

}

log.info(`${filesNotLoaded} files not loaded.`)

if(typeof afterLoad === 'function'){
afterLoad();
}


await pool.end();



}

exports.loadBodaCSVKPIsDataViaStream = loadBodaCSVKPIsDataViaStream;
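
A minimal direct-usage sketch of the exported loader, assuming an async caller with an electron-log style logger; the folder path, log messages, and callback bodies are illustrative, while the callback parameters mirror how they are invoked above:

const bodaPM = window.require('./boda-pm');

// Each CSV row is stringified to JSON, backslash-escaped, and COPYed into the
// single "data" column of pm.kpis.
await bodaPM.loadBodaCSVKPIsDataViaStream(
    '/path/to/kpi/csvs',                                                             // csvFolder (illustrative)
    false,                                                                           // truncateTables
    (table, fileName, csvFolder) => log.info(`Loading ${fileName} into ${table}`),   // beforeFileLoad
    (table, fileName, csvFolder) => log.info(`Finished ${fileName}`),                // afterFileLoad
    () => log.info('Boda KPI load starting'),                                        // beforeLoad
    () => log.info('Boda KPI load complete')                                         // afterLoad
);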
2 changes: 1 addition & 1 deletion background/vendor-formats.js
@@ -11,7 +11,7 @@ const VENDOR_PM_FORMATS = {
'HUAWEI': ['NE_BASED_MEAS_COLLEC_XML'],
'ZTE': ['BULKCM','XLS'],
'NOKIA': ['RAML'],
'BODASTAGE': []
'BODASTAGE': ["CSV", "EXCEL"]
}

const VENDOR_FM_FORMATS = {
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "Boda-Lite",
"version": "0.3.1",
"version": "0.4.0",
"description": "Boda-Lite is a telecommunication network management application",
"private": true,
"homepage": "./",
4 changes: 3 additions & 1 deletion src/modules/cm/ParseAndImport.jsx
@@ -216,7 +216,9 @@ class ParseAndImport extends React.Component {
return;
}

if(this.state.outputFolderText === this.state.inputFileText){
if(this.state.outputFolderText === this.state.inputFileText &&
!(this.state.currentVendor === 'BODASTAGE' && this.state.currentDataType === 'PM' && this.state.currentFormat === 'CSV')
){
this.setState({errorMessage: `Input and output folders should be different.`})
return;
}
2 changes: 1 addition & 1 deletion src/modules/cm/VendorFormats.js
@@ -13,7 +13,7 @@ export const VENDOR_PM_FORMATS = {
'ZTE': ['MEAS_COLLEC_XML','EXCEL'],
'NOKIA': ['PM_XML'],
'MOTOROLA': [],
'BODASTAGE': []
'BODASTAGE': ["CSV","EXCEL"]
}

export const VENDOR_FM_FORMATS = {
2 changes: 1 addition & 1 deletion src/version.js
@@ -1,3 +1,3 @@
export const VERSION = "0.3.1";
export const VERSION = "0.4.0";

export default VERSION;
