
Commit 42c4dd0

Merge pull request #47 from jkaninda/develop
Develop
jkaninda authored Aug 4, 2024
2 parents 296ee1e + ac30210 commit 42c4dd0
Showing 8 changed files with 67 additions and 29 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/build.yml
@@ -0,0 +1,32 @@
+name: Build
+on:
+  push:
+    branches: ['develop']
+env:
+  BUILDKIT_IMAGE: jkaninda/pg-bkup
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      -
+        name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+      -
+        name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      -
+        name: Login to DockerHub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      -
+        name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          push: true
+          file: "./docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64,linux/arm/v7
+          tags: |
+            "${{env.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"
8 changes: 5 additions & 3 deletions Makefile
@@ -29,10 +29,10 @@ docker-run-scheduled: docker-build


docker-run-scheduled-s3: docker-build
-	docker run --rm --network web --user 1000:1000 --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
+	docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"

docker-run-s3: docker-build
-	docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "AWS_S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage s3 #--path /custom-path
+	docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage s3 --mode scheduled --path custom-path --period "* * * * *"


docker-restore-s3: docker-build
@@ -41,8 +41,10 @@ docker-restore-s3: docker-build
docker-run-ssh: docker-build
	docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage ssh

+docker-run-scheduled-ssh: docker-build
+	docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage ssh --mode scheduled --period "* * * * *"
docker-restore-ssh: docker-build
-	docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" ${IMAGE_NAME} bkup restore --storage ssh -f data_20240731_200104.sql.gz.gpg
+	docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" ${IMAGE_NAME} bkup restore --storage ssh -f ${FILE_NAME}

run-docs:
	cd docs && bundle exec jekyll serve -H 0.0.0.0 -t
4 changes: 3 additions & 1 deletion docker/Dockerfile
@@ -20,6 +20,7 @@ ENV AWS_S3_ENDPOINT=""
ENV AWS_S3_BUCKET_NAME=""
ENV AWS_ACCESS_KEY=""
ENV AWS_SECRET_KEY=""
+ENV AWS_S3_PATH=""
ENV AWS_REGION="us-west-2"
ENV AWS_DISABLE_SSL="false"
ENV GPG_PASSPHRASE=""
@@ -63,4 +64,5 @@ RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup

ADD docker/supervisord.conf /etc/supervisor/supervisord.conf

-WORKDIR $WORKDIR
+WORKDIR $WORKDIR
+#ENTRYPOINT ["/usr/local/bin/pg-bkup"]
22 changes: 13 additions & 9 deletions pkg/backup.go
@@ -25,7 +25,6 @@ func StartBackup(cmd *cobra.Command) {
	utils.GetEnv(cmd, "period", "SCHEDULE_PERIOD")

	//Get flag value and set env
-	s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
	remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
	storage = utils.GetEnv(cmd, "storage", "STORAGE")
	file = utils.GetEnv(cmd, "file", "FILE_NAME")
@@ -35,6 +34,8 @@ func StartBackup(cmd *cobra.Command) {
	executionMode, _ = cmd.Flags().GetString("mode")
	dbName = os.Getenv("DB_NAME")
	gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
+	_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
+
	//
	if gpgPassphrase != "" {
		encryption = true
@@ -49,7 +50,7 @@
	if executionMode == "default" {
		switch storage {
		case "s3":
-			s3Backup(backupFileName, s3Path, disableCompression, prune, backupRetention, encryption)
+			s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
		case "local":
			localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
		case "ssh", "remote":
@@ -61,22 +62,23 @@
		}

	} else if executionMode == "scheduled" {
-		scheduledMode()
+		scheduledMode(storage)
	} else {
		utils.Fatal("Error, unknown execution mode!")
	}

}

// Run in scheduled mode
-func scheduledMode() {
+func scheduledMode(storage string) {

	fmt.Println()
	fmt.Println("**********************************")
	fmt.Println(" Starting PostgreSQL Bkup... ")
	fmt.Println("***********************************")
	utils.Info("Running in Scheduled mode")
	utils.Info("Execution period %s ", os.Getenv("SCHEDULE_PERIOD"))
+	utils.Info("Storage type %s ", storage)

	//Test database connexion
	utils.TestDatabaseConnection()
@@ -101,8 +103,9 @@ func scheduledMode() {
			utils.Info("Supervisor stopped.")
		}
	}()
+
	if _, err := os.Stat(cronLogFile); os.IsNotExist(err) {
-		utils.Fatal("Log file %s does not exist.", cronLogFile)
+		utils.Fatal(fmt.Sprintf("Log file %s does not exist.", cronLogFile))
	}
	t, err := tail.TailFile(cronLogFile, tail.Config{Follow: true})
	if err != nil {
@@ -213,8 +216,9 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac
	}
}

-func s3Backup(backupFileName string, s3Path string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
+func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
+	s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
	utils.Info("Backup database to s3 storage")
	//Backup database
	BackupDatabase(backupFileName, disableCompression)
@@ -256,7 +260,7 @@ func sshBackup(backupFileName, remotePath string, disableCompression bool, prune
		finalFileName = fmt.Sprintf("%s.%s", backupFileName, "gpg")
	}
	utils.Info("Uploading backup file to remote server...")
-	utils.Info("Backup name is ", backupFileName)
+	utils.Info("Backup name is %s", backupFileName)
	err := CopyToRemote(finalFileName, remotePath)
	if err != nil {
		utils.Fatal("Error uploading file to the remote server: %s ", err)
@@ -266,7 +270,7 @@
	//Delete backup file from tmp folder
	err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
	if err != nil {
-		utils.Error("Error deleting file:", err)
+		utils.Error("Error deleting file: %v", err)

	}
	if prune {
@@ -282,7 +286,7 @@ func encryptBackup(backupFileName string) {
	gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
	err := Encrypt(filepath.Join(tmpPath, backupFileName), gpgPassphrase)
	if err != nil {
-		utils.Fatal("Error during encrypting backup %s", err)
+		utils.Fatal("Error during encrypting backup %v", err)
	}

}
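
The s3Backup change above stops threading the S3 path through a parameter and instead reads it via utils.GetEnvVariable, which takes a primary variable name and a legacy fallback (AWS_S3_PATH, then S3_PATH). A minimal sketch of such a two-name lookup, assuming the helper simply falls back when the first variable is unset — the real implementation in the utils package may differ:

package utils

import "os"

// GetEnvVariable returns the value of envName, falling back to the
// deprecated fallbackName when envName is unset. Hypothetical sketch of
// the helper called by s3Backup above.
func GetEnvVariable(envName, fallbackName string) string {
	if value := os.Getenv(envName); value != "" {
		return value
	}
	return os.Getenv(fallbackName)
}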
13 changes: 3 additions & 10 deletions pkg/scripts.go
@@ -24,17 +24,10 @@ func CreateCrontabScript(disableCompression bool, storage string) {

	var scriptContent string

-	if storage == "s3" {
-		scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
+	scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
set -e
-bkup backup --dbname %s --port %s --storage s3 --path %s %v
-`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), os.Getenv("S3_PATH"), disableC)
-	} else {
-		scriptContent = fmt.Sprintf(`#!/usr/bin/env bash
-set -e
-bkup backup --dbname %s --port %s %v
-`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), disableC)
-	}
+/usr/local/bin/pg-bkup backup --dbname %s --port %s --storage %s %v
+`, os.Getenv("DB_NAME"), os.Getenv("DB_PORT"), storage, disableC)

	if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil {
		utils.Fatal("Error writing to %s: %v\n", backupCronFile, err)
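
With the if/else removed, a single template now serves every storage backend, and the storage flag is interpolated directly. For illustration, the small program below renders that unified template with hypothetical stand-in values (DB_NAME=mydb, DB_PORT=5432, storage "s3", and a placeholder string for the disableC flag, whose real contents are defined elsewhere in the package):

package main

import "fmt"

// Renders the unified crontab template from CreateCrontabScript with
// illustrative values; "mydb", "5432", and the flag string are
// hypothetical stand-ins, not values from this commit.
func main() {
	scriptContent := fmt.Sprintf(`#!/usr/bin/env bash
set -e
/usr/local/bin/pg-bkup backup --dbname %s --port %s --storage %s %v
`, "mydb", "5432", "s3", "--disable-compression")
	fmt.Print(scriptContent)
}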
11 changes: 9 additions & 2 deletions pkg/var.go
@@ -1,7 +1,5 @@
package pkg

-const s3MountPath string = "/s3mnt"
-const s3fsPasswdFile string = "/etc/passwd-s3fs"
const cronLogFile = "/var/log/pg-bkup.log"
const tmpPath = "/tmp/backup"
const backupCronFile = "/usr/local/bin/backup_cron.sh"
@@ -37,3 +35,12 @@ var sshVars = []string{
	"SSH_HOST_NAME",
	"SSH_PORT",
}
+
+// AwsVars Required environment variables for AWS S3 storage
+var awsVars = []string{
+	"AWS_S3_ENDPOINT",
+	"AWS_S3_BUCKET_NAME",
+	"AWS_ACCESS_KEY",
+	"AWS_SECRET_KEY",
+	"AWS_REGION",
+}
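
The new awsVars list pairs with utils.CheckEnvVars, which the utils/s3.go hunk below calls before creating a session. A plausible shape for that validator, assuming it collects and reports every missing name at once — the actual utils implementation may differ:

package utils

import (
	"fmt"
	"os"
	"strings"
)

// CheckEnvVars returns an error naming every variable in vars that is
// unset or empty. Hypothetical sketch of the validator used with awsVars.
func CheckEnvVars(vars []string) error {
	var missing []string
	for _, name := range vars {
		if os.Getenv(name) == "" {
			missing = append(missing, name)
		}
	}
	if len(missing) > 0 {
		return fmt.Errorf("missing environment variables: %s", strings.Join(missing, ", "))
	}
	return nil
}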
1 change: 1 addition & 0 deletions utils/logger.go
@@ -55,4 +55,5 @@ func Fatal(msg string, args ...any) {
		fmt.Printf("%s ERROR: %s\n", currentTime, formattedMessage)
	}
	os.Exit(1)
+	os.Kill.Signal()
}
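
Fatal formats its message with printf-style verbs, which is why several call sites in pkg/backup.go above were corrected to pass %s or %v. The sibling helpers presumably follow the same pattern; a sketch of Info under that assumption (the real logger may format timestamps differently):

package utils

import (
	"fmt"
	"time"
)

// Info logs an informational message using the same printf-style
// formatting as Fatal above. Sketch only; the actual helper may differ.
func Info(msg string, args ...any) {
	formattedMessage := fmt.Sprintf(msg, args...)
	currentTime := time.Now().Format("2006/01/02 15:04:05")
	fmt.Printf("%s INFO: %s\n", currentTime, formattedMessage)
}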
5 changes: 1 addition & 4 deletions utils/s3.go
@@ -24,8 +24,6 @@ func CreateSession() (*session.Session, error) {
		"AWS_ACCESS_KEY",
		"AWS_SECRET_KEY",
		"AWS_REGION",
-		"AWS_REGION",
-		"AWS_REGION",
	}

	endPoint := GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
@@ -41,8 +39,7 @@

	err = CheckEnvVars(awsVars)
	if err != nil {
-		Error("Error checking environment variables\n: %s", err)
-		os.Exit(1)
+		Fatal("Error checking environment variables\n: %s", err)
	}
	// Configure to use MinIO Server
	s3Config := &aws.Config{
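
CreateSession builds an aws-sdk-go (v1) configuration aimed at MinIO-compatible endpoints. A self-contained sketch of how such a session is typically assembled — field values here are illustrative, and the commit's own config may set additional options:

package utils

import (
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
)

// newS3Session mirrors the MinIO-oriented config started in the hunk
// above; path-style addressing is what most S3-compatible servers expect.
func newS3Session(endPoint, region, accessKey, secretKey string, disableSsl bool) (*session.Session, error) {
	s3Config := &aws.Config{
		Credentials:      credentials.NewStaticCredentials(accessKey, secretKey, ""),
		Endpoint:         aws.String(endPoint),
		Region:           aws.String(region),
		DisableSSL:       aws.Bool(disableSsl),
		S3ForcePathStyle: aws.Bool(true),
	}
	return session.NewSession(s3Config)
}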
