From 620c8c259b6388d4cc7c9c1cb8a071669d72ce4c Mon Sep 17 00:00:00 2001
From: purarue <7804791+purarue@users.noreply.github.com>
Date: Sun, 27 Oct 2024 13:27:19 -0700
Subject: [PATCH] chore: update urls

---
 README.md                         | 6 +++---
 bin/approve-comments              | 2 +-
 bin/mediaproxy                    | 2 +-
 bin/remsync                       | 2 +-
 bin/remsync-image                 | 2 +-
 bin/shorten                       | 4 ++--
 bin/update-recent-page-hits       | 2 +-
 bin/vps_backup                    | 4 ++--
 functions.sh                      | 8 ++++----
 jobs/linux/backup_server_tar.job  | 2 +-
 jobs/linux/check_fish_server.job  | 6 +++---
 jobs/linux/guestbook_comments.job | 4 ++--
 jobs/linux/page_hit_count.job     | 2 +-
 13 files changed, 23 insertions(+), 23 deletions(-)

diff --git a/README.md b/README.md
index e6447b5..d895908 100644
--- a/README.md
+++ b/README.md
@@ -15,19 +15,19 @@ This runs on an Debian server, but it should be OS agnostic. `vps_install` will
 
 [`bin`](./bin) includes scripts that are run on my machine or on the server
 
-See [here](https://sean.fish/x/blog/server-setup/) for a blog post describing how I set up this server.
+See [here](https://purarue.xyz/x/blog/server-setup/) for a blog post describing how I set up this server.
 
 - [`vps_install`](./bin/vps_install) clones and sets up environments for each application. Checks that you have corresponding commands/packages installed and that required credential/files are in the right location, installs virtual environments/packages for each application.
 - [`super`](./super) lets me interact with the underlying `supervisord`/`supervisorctl` processes with my environment variables/configuration.
 - [`logs`](./logs) streams the logs from all applications
-- [`vps_backup`](./bin/vps_backup) copies cache/token files to a tar.gz so they can be backed up. [runs with bgproc](https://github.com/purarue/bgproc). [`backup_server`](./backup_server) is run from my computer, which ssh's into the server to run that. Runs once per day, in [`housekeeping`](https://sean.fish/d/housekeeping)
+- [`vps_backup`](./bin/vps_backup) copies cache/token files to a tar.gz so they can be backed up. [runs with bgproc](https://github.com/purarue/bgproc). [`backup_server`](./backup_server) is run from my computer, which ssh's into the server to run that. Runs once per day, in [`housekeeping`](https://purarue.xyz/d/housekeeping)
 - [`vps_deploy`](./bin/vps_deploy) and [`deploy`](./deploy) are a basic ssh/git pull/restart/view logs script for projects which I deploy frequently
 - [`vps_generate_static_sites`](./bin/vps_generate_static_sites) builds my static websites and places them in `~/static_files`.
 - [`remsync`](./bin/remsync) is a script that's run on my machine, which rsyncs files from a local directory to the server. That directory is served with nginx, so I can sync something to the server from my CLI and send someone a link. [example output](https://gist.github.com/purarue/2b11729859d248069a0eabf2e91e2800). Has two endpoints, `f` and `p`, which specify private (a non-autoindexed nginx listing) and public indexes.
 - [`playlist`](./bin/playlist) interfaces with my [playlist manager](https://github.com/purarue/plaintext-playlist). It allows me to select multiple playlists, combines all the files from those playlists into a single mp3 and syncs that up to my server with `remsync`. Often run this on my machine before I leave my house; I then listen to the file on my phone by going to corresponding URL.
 - [`mediaproxy`](./bin/mediaproxy) to ssh into the server, `youtube-dl`/`ffmpeg` something and host it on a readable link. Has video/audio wrapper that use more of my [personal scripts](https://github.com/purarue/dotfiles/) to prompt me to to select format codes, (similar to [`mpvf`](https://github.com/purarue/mpvf/)). That way, I can press a keybind, which grabs the URL from my clipboard and re-hosts it on my server.
 - [`shorten`](./bin/shorten) creates a shortened url using [`no-db-shorturl`](https://github.com/purarue/no-db-shorturl)
-- [`approve-comments`](./bin/approve-comments) approves comments for my guest book at [https://sean.fish](https://github.com/purarue/glue)
+- [`approve-comments`](./bin/approve-comments) approves comments for my guest book at [https://purarue.xyz](https://github.com/purarue/glue)
 - [`mnu`](./bin/mnu) runs the periodic job to update the [google sheet](https://github.com/purarue/mnu_gsheets)
 - [`directories`](./directories) is a helper script sourced at the top of other scripts that defines common application location environment variables
 
diff --git a/bin/approve-comments b/bin/approve-comments
index ec77e28..6413568 100755
--- a/bin/approve-comments
+++ b/bin/approve-comments
@@ -1,5 +1,5 @@
 #!/bin/bash
-# run from my computer to approve guest book comments at https://sean.fish/#
+# run from my computer to approve guest book comments at https://purarue.xyz/#
 # specify one of --approve-comments, --review-comments, --print-count or --print-new-comments to this script
 # defaults to --approve-comments
 # see https://github.com/purarue/glue/blob/master/production_server
diff --git a/bin/mediaproxy b/bin/mediaproxy
index 0274916..ca9aec1 100755
--- a/bin/mediaproxy
+++ b/bin/mediaproxy
@@ -17,7 +17,7 @@
 readonly SYNC_USER="sean"
 readonly SYNC_KEYFILE="${HOME}/.ssh/vultr" # ssh keyfile
 readonly TO_SERVER=140.82.50.43
-readonly BASE_URL="https://sean.fish/m"
+readonly BASE_URL="https://purarue.xyz/m"
 
 ######### SETUP
 
diff --git a/bin/remsync b/bin/remsync
index 1d88412..8a13bc2 100755
--- a/bin/remsync
+++ b/bin/remsync
@@ -54,7 +54,7 @@ readonly SYNC_USER="sean"
 readonly SSH_TO='vultr' # setup in ~/.ssh/config
 declare TO_DIR="/home/${SYNC_USER}/f/"
 [[ -n "$REMSYNC_PUBLIC" ]] && TO_DIR="/home/${SYNC_USER}/p/"
-readonly BASE_URL="https://sean.fish"
+readonly BASE_URL="https://purarue.xyz"
 readonly TO_DIR
 
 # local information
diff --git a/bin/remsync-image b/bin/remsync-image
index 084f35e..d846227 100755
--- a/bin/remsync-image
+++ b/bin/remsync-image
@@ -29,5 +29,5 @@ cp -p "$image" "${XDG_DOCUMENTS_DIR}/remsync/i/${name}" || {
 }
 remsync || exit 1
 # create URL
-url="https://sean.fish/f/i/${name}"
+url="https://purarue.xyz/f/i/${name}"
 printf '%s' "$url" | clp
diff --git a/bin/shorten b/bin/shorten
index 6325422..61a8e1e 100755
--- a/bin/shorten
+++ b/bin/shorten
@@ -6,11 +6,11 @@
 # the server generates a random hash
 # e.g.
 # shorten "https://wiki.archlinux.org/index.php/File_opener" open
-# would return "https://sean.fish/s/open"
+# would return "https://purarue.xyz/s/open"
 # which now redirects to the archwiki link
 
 # handle user input
-readonly UPLOAD_TO="https://sean.fish/s/"
+readonly UPLOAD_TO="https://purarue.xyz/s/"
 readonly SHORTURL_TOKEN="${SHORTURL_TOKEN:?No shorturl token set}"
 readonly URL="${1:?No url provided to shorten}"
 readonly HASH="$2" # is fine if this is empty
diff --git a/bin/update-recent-page-hits b/bin/update-recent-page-hits
index 0270dd2..144a6bb 100755
--- a/bin/update-recent-page-hits
+++ b/bin/update-recent-page-hits
@@ -3,7 +3,7 @@
 set -o pipefail
 
 main() {
-	data="$(curl -sL 'https://sean.fish/api/page_hit/7' | jq)" || return $?
+	data="$(curl -sL 'https://purarue.xyz/api/page_hit/7' | jq)" || return $?
 	count="$(echo "$data" | jq -r '.count')"
 	# https://github.com/purarue/is-integer
 	if is-integer "$count" >/dev/null; then
diff --git a/bin/vps_backup b/bin/vps_backup
index d8bcc26..3d37e7b 100755
--- a/bin/vps_backup
+++ b/bin/vps_backup
@@ -76,8 +76,8 @@ mkdir_if_not_exists "$BACKUP_COUNTDOWN"
 mkdir_if_not_exists "$BACKUP_NOTIFY"
 
 # save data from my website
-curl -s 'https://sean.fish/api/gb_comment' >"$BACKUP_DIR/gb_comment.json"
-curl -s 'https://sean.fish/api/page_hit' >"$BACKUP_DIR/page_hit.json"
+curl -s 'https://purarue.xyz/api/gb_comment' >"$BACKUP_DIR/gb_comment.json"
+curl -s 'https://purarue.xyz/api/page_hit' >"$BACKUP_DIR/page_hit.json"
 
 expect_file_and_copy "$NOTIFY_BOT/token.yaml" "$BACKUP_NOTIFY"
 expect_file_and_copy "$NOTIFY_BOT/old" "$BACKUP_NOTIFY"
diff --git a/functions.sh b/functions.sh
index 94e7edc..40a88e8 100644
--- a/functions.sh
+++ b/functions.sh
@@ -9,8 +9,8 @@ alias remsync-public='REMSYNC_PUBLIC=1 remsync' # to push to /p/ (public index)
 alias remsync-ranger='ranger "${XDG_DOCUMENTS_DIR}/remsync" && remsync'
 alias remsync-public-ranger='ranger "${HOME}/Files/remsync_public" && remsync-public'
 alias print-new-comments='approve-comments --print-new-comments'
-alias page-hits="curl -s 'https://sean.fish/api/page_hit' | jq '.count'"
-alias gb-comments="curl 'https://sean.fish/api/gb_comment' | jq 'reverse'"
+alias page-hits="curl -s 'https://purarue.xyz/api/page_hit' | jq '.count'"
+alias gb-comments="curl 'https://purarue.xyz/api/gb_comment' | jq 'reverse'"
 gb-comments-pretty() {
 	gb-comments |
 		jq '.[]' -c |
@@ -21,10 +21,10 @@ gb-comments-pretty() {
 # print/select open shortened urls
 # https://github.com/purarue/no-db-shorturl
 alias shorturls="ssh vultr 'ls shorturls'"
-alias shz="shorturls | fzf | sed -e 's|^|https://sean.fish/s/|' | tee /dev/tty | clipcopy"
+alias shz="shorturls | fzf | sed -e 's|^|https://purarue.xyz/s/|' | tee /dev/tty | clipcopy"
 remsync-html-from-stdin() {
 	local tmpf
-	# https://sean.fish/d/pipehtml?redirect
+	# https://purarue.xyz/d/pipehtml?redirect
 	tmpf="$(pipehtml "$*")"
 	remsync "$tmpf"
 	rm -f "$tmpf"
diff --git a/jobs/linux/backup_server_tar.job b/jobs/linux/backup_server_tar.job
index 414aa02..cde4e40 100644
--- a/jobs/linux/backup_server_tar.job
+++ b/jobs/linux/backup_server_tar.job
@@ -3,6 +3,6 @@
 evry 2 weeks -backup-fish-backup && {
 	backup_to="${HOME}/Files/Backups/fish_server"
 	mkdir -p "${backup_to}"
-	printlog 'backing up tar.gz sean.fish...'
+	printlog 'backing up tar.gz purarue.xyz...'
 	cp ~/.cache/backup_dir.tar.gz "${backup_to}/$(epoch)_backup_dir.tar.gz"
 }
diff --git a/jobs/linux/check_fish_server.job b/jobs/linux/check_fish_server.job
index b9961db..42f665a 100644
--- a/jobs/linux/check_fish_server.job
+++ b/jobs/linux/check_fish_server.job
@@ -3,7 +3,7 @@
 wait-for-internet -q --timeout "${WFI_TIMEOUT:-10}" || exit 0
 
 evry 30 minutes -check-fish-server && {
-	printlog 'checking sean.fish...'
-	HTTP_CODE="$(curl -L -so /dev/null -w "%{http_code}" 'https://sean.fish')"
-	[[ "$HTTP_CODE" != "200" ]] && send-error "sean.fish is down"
+	printlog 'checking purarue.xyz...'
+	HTTP_CODE="$(curl -L -so /dev/null -w "%{http_code}" 'https://purarue.xyz')"
+	[[ "$HTTP_CODE" != "200" ]] && send-error "purarue.xyz is down"
 }
diff --git a/jobs/linux/guestbook_comments.job b/jobs/linux/guestbook_comments.job
index 56e9fab..1e156e4 100644
--- a/jobs/linux/guestbook_comments.job
+++ b/jobs/linux/guestbook_comments.job
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 # saves the number of unapproved comments to a cache file
-# this is for my guest book on https://sean.fish/
+# this is for my guest book on https://purarue.xyz/
 
 wait-for-internet -q --timeout "${WFI_TIMEOUT:-10}" || exit 0
 
@@ -8,7 +8,7 @@ evry 15 minutes -guestbook_comments && {
 
 	get_count() {
 		local COUNT_LINE
-		COUNT_LINE="$(curl -sL 'https://sean.fish/api/gb_comment/1' | jq -r .count)" || return $?
+		COUNT_LINE="$(curl -sL 'https://purarue.xyz/api/gb_comment/1' | jq -r .count)" || return $?
 		echo "$COUNT_LINE"
 	}
 
diff --git a/jobs/linux/page_hit_count.job b/jobs/linux/page_hit_count.job
index 71ad3eb..3e5a728 100644
--- a/jobs/linux/page_hit_count.job
+++ b/jobs/linux/page_hit_count.job
@@ -3,6 +3,6 @@
 wait-for-internet -q --timeout "${WFI_TIMEOUT:-10}" || exit 0
 
 evry 30 minutes -recent_page_hits && {
-	printlog 'recent_page_hits:getting recent page hit count from sean.fish'
+	printlog 'recent_page_hits:getting recent page hit count from purarue.xyz'
 	update-recent-page-hits
 }
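
The commit above is a mechanical domain swap (sean.fish to purarue.xyz) across the repository. The shell sketch below is not part of the patch; it only illustrates one way such a sweep could be regenerated and verified from a checkout, and it assumes GNU sed and xargs (sed -i, xargs -r) plus a clean git working tree.

#!/usr/bin/env bash
# Illustrative sketch, not part of the patch: redo a domain swap like the
# one in this commit across all tracked files, then confirm no references
# to the old domain remain. Assumes GNU sed and xargs.
set -u

old='sean.fish'     # old domain, matched literally by `git grep -F`
old_re='sean\.fish' # same domain with the dot escaped for sed's regex
new='purarue.xyz'   # new domain

# rewrite every tracked file that mentions the old domain, in place
git grep -lz -F "$old" | xargs -0 -r sed -i "s|${old_re}|${new}|g"

# verify: git grep exits non-zero once nothing matches any more
if git grep -n -F "$old"; then
	echo "some files still reference ${old}" >&2
	exit 1
fi
echo "all URLs updated"

Running git diff afterwards should produce hunks equivalent to the ones in this patch, and the final git grep doubles as a quick pre-commit review step.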