feat(backup): write xud db every 3 minutes max
This changes the xud backup behavior from writing a backup of the xud db
every time it changes (which is very often) to writing one at most every
3 minutes, provided the db has changed in the preceding 3 minutes. Note
that this approach still needs refinement to prevent corrupted backups,
as described in issue #1652.

Closes #1368.
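The gist of the new approach, as a minimal standalone sketch: the file watcher only flags the database as changed, and a 3-minute interval timer writes the backup when the flag is set. The class and variable names (ThrottledBackup, dbPath, backupPath) and the synchronous fs calls are simplifications for illustration, not the actual implementation in lib/backup/Backup.ts below.

import fs from 'fs';

const BACKUP_INTERVAL_MS = 3 * 60 * 1000; // write a backup at most once every 3 minutes

class ThrottledBackup {
  /** Set by the file watcher, cleared whenever a backup is written. */
  private databaseChanged = false;
  private timer: ReturnType<typeof setInterval> | undefined;
  private watcher: fs.FSWatcher | undefined;

  constructor(private dbPath: string, private backupPath: string) {}

  public start = () => {
    // The watcher only marks the db as dirty; it no longer copies the file on every change.
    this.watcher = fs.watch(this.dbPath, { persistent: true }, (event) => {
      if (event === 'change') {
        this.databaseChanged = true;
      }
    });

    // The timer does the actual copy, and only when something changed since the last run.
    this.timer = setInterval(() => {
      if (this.databaseChanged) {
        fs.writeFileSync(this.backupPath, fs.readFileSync(this.dbPath));
        this.databaseChanged = false;
      }
    }, BACKUP_INTERVAL_MS);
  }

  public stop = () => {
    this.watcher?.close();
    if (this.timer) {
      clearInterval(this.timer);
    }
  }
}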
sangaman committed Jun 16, 2020
1 parent ecca2ae commit cfdf8b6
Showing 2 changed files with 33 additions and 87 deletions.
42 changes: 23 additions & 19 deletions lib/backup/Backup.ts
@@ -1,4 +1,3 @@
import { createHash } from 'crypto';
import { EventEmitter } from 'events';
import fs from 'fs';
import path from 'path';
@@ -9,7 +8,9 @@ import { getDefaultBackupDir } from '../utils/utils';

interface Backup {
on(event: 'newBackup', listener: (path: string) => void): this;
on(event: 'changeDetected', listener: (client: string) => void): this;
emit(event: 'newBackup', path: string): boolean;
emit(event: 'changeDetected', client: string): boolean;
}

class Backup extends EventEmitter {
@@ -22,6 +23,18 @@ class Backup extends EventEmitter {
private lndClients: LndClient[] = [];
private checkLndTimer: ReturnType<typeof setInterval> | undefined;

/** A map of client names to a boolean indicating whether they have changed since the last backup. */
private databaseChangedMap = new Map<string, boolean>();

private xudBackupTimer = setInterval(() => {
if (this.databaseChangedMap.get('xud') === true) {
const backupPath = this.getBackupPath('xud');
const content = this.readDatabase(this.config.dbpath);
this.writeBackup(backupPath, content);
this.databaseChangedMap.set('xud', false);
}
}, 180000);

public start = async (args: { [argName: string]: any }) => {
await this.config.load(args);

@@ -65,6 +78,8 @@
for (const lndClient of this.lndClients) {
lndClient.close();
}

clearInterval(this.xudBackupTimer);
}

private waitForLndConnected = (lndClient: LndClient) => {
@@ -131,15 +146,13 @@
}

private startFilewatcher = async (client: string, dbPath: string) => {
let previousDatabaseHash: string | undefined;
const backupPath = this.getBackupPath(client);

if (fs.existsSync(dbPath)) {
this.logger.verbose(`Writing initial ${client} database backup to: ${backupPath}`);
const { content, hash } = this.readDatabase(dbPath);
const content = this.readDatabase(dbPath);

this.writeBackup(backupPath, content);
previousDatabaseHash = hash;
} else {
this.logger.warn(`Could not find database file of ${client} at ${dbPath}, waiting for it to be created...`);
const dbDir = path.dirname(dbPath);
@@ -157,29 +170,19 @@

this.fileWatchers.push(fs.watch(dbPath, { persistent: true, recursive: false }, (event: string) => {
if (event === 'change') {
const { content, hash } = this.readDatabase(dbPath);

// Compare the MD5 hash of the current content of the file with hash of the content when
// it was backed up the last time to ensure that the content of the file has changed
if (hash !== previousDatabaseHash) {
this.logger.trace(`${client} database changed`);

previousDatabaseHash = hash;
this.writeBackup(backupPath, content);
}
this.logger.trace(`${client} database changed`);
this.emit('changeDetected', client);
this.databaseChangedMap.set(client, true);
}
}));

this.logger.verbose(`Listening for changes to the ${client} database`);
}

private readDatabase = (path: string): { content: Buffer, hash: string } => {
private readDatabase = (path: string) => {
const content = fs.readFileSync(path);

return {
content,
hash: createHash('md5').update(content).digest('base64'),
};
return content;
}

private writeBackup = (backupPath: string, data: Uint8Array) => {
@@ -188,6 +191,7 @@
backupPath,
data,
);
this.logger.trace(`new backup written to ${backupPath}`);
this.emit('newBackup', backupPath);
} catch (error) {
this.logger.error(`Could not write backup file: ${error}`);
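For context, a minimal sketch of how a consumer of the Backup class might use the two events after this change; the import path, the dbpath value, and the handler bodies are illustrative assumptions. 'changeDetected' fires as soon as a watched database changes, while 'newBackup' still fires only once a backup file has actually been written.

import Backup from './lib/backup/Backup'; // import path assumed for illustration

const backup = new Backup();

backup.on('changeDetected', (client) => {
  console.log(`${client} database changed, backup pending`);
});

backup.on('newBackup', (backupPath) => {
  console.log(`new backup written to ${backupPath}`);
});

// 'xud.db' is a hypothetical database path for this example.
backup.start({ backupdir: 'backup-test', dbpath: 'xud.db', loglevel: 'error' }).catch(console.error);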
78 changes: 10 additions & 68 deletions test/jest/Backup.spec.ts
@@ -14,18 +14,13 @@ const removeDir = (dir: string) => {

const backupdir = 'backup-test';

const raidenDatabasePath = 'raiden';
const xudDatabasePath = 'xud';

const backups = {
lnd: {
event: 'lnd event',
startup: 'lnd startup',
},
raiden: {
event: 'raiden event',
startup: 'raiden startup',
},
xud: {
event: 'xud event',
startup: 'xud startup',
@@ -35,7 +30,6 @@ const backups = {
let channelBackupCallback: any;

const onListenerMock = jest.fn((event, callback) => {

if (event === 'channelBackup') {
channelBackupCallback = callback;
} else {
@@ -61,29 +55,20 @@ describe('Backup', () => {
const backup = new Backup();

beforeAll(async () => {
await Promise.all([
fs.promises.writeFile(
raidenDatabasePath,
backups.raiden.startup,
),
fs.promises.writeFile(
xudDatabasePath,
backups.xud.startup,
),
]);
await fs.promises.writeFile(
xudDatabasePath,
backups.xud.startup,
);

await backup.start({
backupdir,
loglevel: 'error',
dbpath: xudDatabasePath,
raiden: {
dbpath: raidenDatabasePath,
},
});
});

afterAll(async () => {
await backup.stop();
afterAll(() => {
backup.stop();
});

test('should write LND backups on startup', () => {
@@ -106,39 +91,6 @@
).toEqual(backups.lnd.event);
});

test('should write Raiden backups on startup', () => {
expect(
fs.readFileSync(
path.join(backupdir, 'raiden'),
'utf8',
),
).toEqual(backups.raiden.startup);
});

test('should write Raiden backups on new event', async () => {
fs.writeFileSync(
raidenDatabasePath,
backups.raiden.event,
);

// Wait to make sure the file watcher handled the new file
await new Promise((resolve, reject) => {
setTimeout(reject, 3000);
backup.on('newBackup', (path) => {
if (path.endsWith(raidenDatabasePath)) {
resolve();
}
});
});

expect(
fs.readFileSync(
path.join(backupdir, 'raiden'),
'utf8',
),
).toEqual(backups.raiden.event);
});

test('should write XUD database backups on startup', () => {
expect(
fs.readFileSync(
@@ -148,7 +100,7 @@
).toEqual(backups.xud.startup);
});

test('should write XUD database backups on new event', async () => {
test('should detect XUD database backups on new event', async () => {
fs.writeFileSync(
xudDatabasePath,
backups.xud.event,
@@ -157,29 +109,19 @@
// Wait to make sure the file watcher handled the new file
await new Promise((resolve, reject) => {
setTimeout(reject, 3000);
backup.on('newBackup', (path) => {
backup.on('changeDetected', (path) => {
if (path.endsWith(xudDatabasePath)) {
resolve();
}
});
});

expect(
fs.readFileSync(
path.join(backupdir, 'xud'),
'utf8',
),
).toEqual(backups.xud.event);
});

afterAll(async () => {
await backup.stop();
backup.stop();

removeDir(backupdir);

await Promise.all([
fs.promises.unlink(xudDatabasePath),
fs.promises.unlink(raidenDatabasePath),
]);
await fs.promises.unlink(xudDatabasePath);
});
});
