Commit 6a87038: backup vacuum

AvidDabbler committed Feb 5, 2024
1 parent a51fb23 commit 6a87038
Showing 5 changed files with 739 additions and 37 deletions.
3 changes: 0 additions & 3 deletions app/config.server.ts
@@ -7,7 +7,6 @@ const loadEnv = () => {
try {
// Path to the .env file
const envFilePath = path.resolve(__dirname, ".env");
console.log(envFilePath);

// Read the contents of the .env file
const envFileContent = fs.readFileSync(envFilePath, "utf8");
@@ -20,7 +19,6 @@ const loadEnv = () => {
const [key, value] = line.split("=");
if (key && value)
process.env[key.trim()] = value.trim().replaceAll('"', "");

Check failure on line 21 in app/config.server.ts (GitHub Actions / ʦ TypeScript): Property 'replaceAll' does not exist on type 'string'. Do you need to change your target library? Try changing the 'lib' compiler option to 'es2021' or later. (See the note after this file's diff.)
console.log(key, value);
});
} catch {
return;
@@ -29,7 +27,6 @@ const loadEnv = () => {

const config = () => {
loadEnv();
console.log(process.env);
const _config = z
.object({
ADMIN_EMAIL: z.string(),
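
Note on the TypeScript failure above: `replaceAll` is only typed when the compiler's `lib` setting includes ES2021, so one fix is to raise `lib` to "ES2021" (or later) in tsconfig.json, as the check suggests. Another is to keep the current target and strip the quotes with a global-regex `replace`; a minimal sketch of that line, assuming the intent is simply to remove every double quote from the value:

    // Equivalent to replaceAll('"', "") without requiring the ES2021 lib:
    // a regex with the global flag replaces every double quote, not just the first.
    process.env[key.trim()] = value.trim().replace(/"/g, "");
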
1 change: 0 additions & 1 deletion app/gtfs/gtfs.ts
@@ -9,7 +9,6 @@ import { gtfsConfig, envConfig } from "../config.server";

export const load = async () => {
try {
console.log(envConfig);
await importGtfs(gtfsConfig);
await gtfsToHtml(gtfsConfig)
.then(() => {
88 changes: 85 additions & 3 deletions drizzle/backup.ts
@@ -1,7 +1,9 @@

import fs from 'fs'

Check failure on line 1 in drizzle/backup.ts (GitHub Actions / ⬣ ESLint): There should be at least one empty line between import groups
import sqlite3 from 'sqlite3'
import cron from 'node-cron';

Check failure on line 3 in drizzle/backup.ts (GitHub Actions / ⬣ ESLint): There should be at least one empty line between import groups
Check failure on line 3 in drizzle/backup.ts (GitHub Actions / ⬣ ESLint): `node-cron` import should occur before import of `sqlite3`
import { envConfig } from '~/config.server';

Check failure on line 4 in drizzle/backup.ts (GitHub Actions / ⬣ ESLint): There should be at least one empty line between import groups (a reordered import sketch follows the import block below)
import { S3Client, CopyObjectCommand } from '@aws-sdk/client-s3'
import { S3Client, CopyObjectCommand, PutObjectCommand } from '@aws-sdk/client-s3'
import path from 'path';
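
The ESLint failures above are import-order complaints (presumably the `import/order` rule from eslint-plugin-import; its exact configuration is not shown in this diff). A sketch of an ordering that should satisfy both messages: built-in modules first, then external packages with `node-cron` ahead of `sqlite3`, then the internal `~/` alias import, with a blank line between each group.

    // Node built-ins
    import fs from 'fs';
    import path from 'path';

    // External packages (node-cron before sqlite3, per the lint message)
    import { CopyObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
    import cron from 'node-cron';
    import sqlite3 from 'sqlite3';

    // Internal alias imports last
    import { envConfig } from '~/config.server';
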

const s3Client = new S3Client({
region: "us-east-1", // Replace with your AWS region
@@ -26,6 +28,86 @@ export async function backupS3Object(bucket: string, sourceKey: string, destinat
}
}

const vacuumDb = (dbPath: string, backupPath: string) => {
// Open the SQLite database
const db = new sqlite3.Database(dbPath, sqlite3.OPEN_READWRITE, (err) => {
if (err) {
console.error('Error opening the database:', err.message);
return;
}

// Perform VACUUM operation
db.exec(`ATTACH DATABASE '${backupPath}' AS backup;`, (attachErr) => {
if (attachErr) {
console.error('Error attaching backup database:', attachErr.message);
db.close();
return;
}

db.exec('VACUUM INTO backup.main;', (vacuumErr) => {
if (vacuumErr) {
console.error('Error performing VACUUM:', vacuumErr.message);
} else {
console.log('VACUUM operation successful.');

// If needed, you can also copy the backup to another location
fs.copyFileSync(`${backupPath}.main`, `${backupPath}`);

console.log('Backup saved to:', backupPath);
}

// Detach the backup database
db.exec('DETACH DATABASE backup;', (detachErr) => {
if (detachErr) {
console.error('Error detaching backup database:', detachErr.message);
}

// Close the main database
db.close((closeErr) => {
if (closeErr) {
console.error('Error closing the database:', closeErr.message);
}
});
});
});
});
});
}
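
A side note on the VACUUM step, not part of the commit: SQLite's `VACUUM INTO` takes a target filename and writes a compacted copy of the database there directly, so no `ATTACH` is needed. A minimal sketch with the same sqlite3 callback API, assuming the goal is simply to produce a standalone compacted backup file:

    // Hypothetical helper: compact dbPath into a standalone backup file.
    const vacuumInto = (dbPath: string, backupPath: string) => {
      const db = new sqlite3.Database(dbPath, sqlite3.OPEN_READWRITE, (err) => {
        if (err) {
          console.error('Error opening the database:', err.message);
          return;
        }
        // VACUUM INTO writes the compacted copy to backupPath and leaves the source untouched.
        db.exec(`VACUUM INTO '${backupPath}';`, (vacuumErr) => {
          if (vacuumErr) console.error('Error performing VACUUM INTO:', vacuumErr.message);
          db.close();
        });
      });
    };
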

async function uploadFilesToS3(directory: string, prefix: string) {
// Read the contents of the local directory
const files = fs.readdirSync(directory);

// Upload each file to S3
for (const file of files) {
const filePath = path.join(directory, file);

// Skip directories
if (fs.statSync(filePath).isDirectory()) {
continue;
}

// Read the file content
const fileContent = fs.readFileSync(filePath);

// Create S3 PutObjectCommand with the specified prefix
const uploadParams = {
Bucket: envConfig.S3_BUCKET,
Key: `${prefix}/${file}`, // Include the prefix in the S3 key
Body: fileContent,
};

// Upload the file to S3
try {
const uploadResult = await s3Client.send(new PutObjectCommand(uploadParams));
console.log(`File uploaded successfully: ${file} - ETag: ${uploadResult.ETag}`);
} catch (error) {
console.error(`Error uploading file ${file} to S3:`, error);
}
}
}

cron.schedule('* * * * *', () => {
backupS3Object(envConfig.S3_BUCKET, 'data.db', '/drizzle/data.db')
vacuumDb('/drizzle/data.db', 'drizzle/backup')
uploadFilesToS3('drizzle/data.db', '/db-backup/data.db')
})
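
For reference, node-cron's five-field expression '* * * * *' fires every minute. If the intent is a nightly backup rather than a per-minute one, the same chain could be scheduled as in the sketch below (a hypothetical schedule, not what this commit ships):

    // Hypothetical: run the backup chain once a day at 03:00 server time.
    cron.schedule('0 3 * * *', () => {
      backupS3Object(envConfig.S3_BUCKET, 'data.db', '/drizzle/data.db')
      vacuumDb('/drizzle/data.db', 'drizzle/backup')
      uploadFilesToS3('drizzle/data.db', '/db-backup/data.db')
    })
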
