Commit
Merge branch 'master' into master
nickmansrob authored Sep 24, 2024
2 parents c547abe + fde34d4 commit 5fdbfa0
Showing 5 changed files with 18 additions and 23 deletions.
1 change: 1 addition & 0 deletions ENVIRONMENT.rst
@@ -90,6 +90,7 @@ Environment Configuration Settings
 - **AZURE_TENANT_ID**: (optional) Tenant ID of the Service Principal
 - **CALLBACK_SCRIPT**: the callback script to run on various cluster actions (on start, on stop, on restart, on role change). The script will receive the cluster name, connection string and the current action. See `Patroni <http://patroni.readthedocs.io/en/latest/SETTINGS.html?highlight=callback#postgresql>`__ documentation for details.
 - **LOG_S3_BUCKET**: path to the S3 bucket used for PostgreSQL daily log files (i.e. foobar, without `s3://` prefix). Spilo will add `/spilo/{LOG_BUCKET_SCOPE_PREFIX}{SCOPE}{LOG_BUCKET_SCOPE_SUFFIX}/log/` to that path. Logs are shipped if this variable is set.
+- **LOG_S3_TAGS**: map of key value pairs to be used for tagging files uploaded to S3. Values should be referencing existing environment variables e.g. ``{"ClusterName": "SCOPE", "Namespace": "POD_NAMESPACE"}``
 - **LOG_SHIP_SCHEDULE**: cron schedule for shipping compressed logs from ``pg_log`` (if this feature is enabled, '00 02 * * *' by default)
 - **LOG_ENV_DIR**: directory to store environment variables necessary for log shipping
 - **LOG_TMPDIR**: directory to store temporary compressed daily log files. PGROOT/../tmp by default.
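The new LOG_S3_TAGS setting is indirect: each value in the map names another environment variable whose runtime value becomes the tag. A minimal sketch of that resolution (hypothetical env values; `json.loads` is used here for safety, while the commit itself parses the map with `eval`, as shown in upload_pg_log_to_s3.py below):

```python
import json
import os

# Hypothetical environment, roughly as Spilo would set it inside a pod
os.environ['SCOPE'] = 'demo-cluster'
os.environ['POD_NAMESPACE'] = 'default'
os.environ['LOG_S3_TAGS'] = '{"ClusterName": "SCOPE", "Namespace": "POD_NAMESPACE"}'

# Each value in the map is the *name* of another env var to dereference
tags = json.loads(os.environ['LOG_S3_TAGS'])
tagging = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())
print(tagging)  # ClusterName=demo-cluster&Namespace=default
```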
2 changes: 1 addition & 1 deletion postgres-appliance/Dockerfile
@@ -73,7 +73,7 @@ COPY --from=dependencies-builder /builddeps/wal-g /usr/local/bin/
 COPY build_scripts/patroni_wale.sh build_scripts/compress_build.sh /builddeps/

 # Install patroni and wal-e
-ENV PATRONIVERSION=3.3.2
+ENV PATRONIVERSION=3.3.3
 ENV WALE_VERSION=1.1.1

 WORKDIR /
25 changes: 5 additions & 20 deletions postgres-appliance/bootstrap/maybe_pg_upgrade.py
@@ -1,36 +1,21 @@
 #!/usr/bin/env python
-import datetime
 import logging
 import os
 import subprocess
 import sys
+import glob

 logger = logging.getLogger(__name__)


-def tail_postgres_log(weekday):
+def tail_postgres_logs():
     logdir = os.environ.get('PGLOG', '/home/postgres/pgdata/pgroot/pg_log')
-    logfile = os.path.join(logdir, 'postgresql-{0}.csv'.format(weekday))
+    csv_files = glob.glob(os.path.join(logdir, '*.csv'))
+    # Find the last modified CSV file
+    logfile = max(csv_files, key=os.path.getmtime)
     return subprocess.check_output(['tail', '-n5', logfile]).decode('utf-8')


-def tail_postgres_logs():
-    weekday = datetime.datetime.today().isoweekday()
-    try:
-        ret = tail_postgres_log(weekday)
-    except Exception:
-        ret = ''
-    if not ret:
-        weekday += 6
-        if weekday > 7:
-            weekday %= 7
-        try:
-            ret = tail_postgres_log(weekday)  # maybe log just switched? try yesterday
-        except Exception:
-            ret = ''
-    return ret
-
-
 def wait_end_of_recovery(postgresql):
     from patroni.utils import polling_loop
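The rewrite drops the weekday-guessing retry logic and simply tails whichever CSV log was modified last. One edge case the committed version leaves unhandled: `max()` raises `ValueError` when the glob matches no files, whereas the old code caught exceptions and returned `''`. A defensive variant, as a sketch rather than the committed code:

```python
import glob
import os
import subprocess


def tail_postgres_logs(lines=5):
    """Tail the most recently modified CSV log, or return '' if none exist."""
    logdir = os.environ.get('PGLOG', '/home/postgres/pgdata/pgroot/pg_log')
    csv_files = glob.glob(os.path.join(logdir, '*.csv'))
    if not csv_files:  # max() would raise ValueError on an empty sequence
        return ''
    logfile = max(csv_files, key=os.path.getmtime)
    return subprocess.check_output(['tail', '-n{0}'.format(lines), logfile]).decode('utf-8')
```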
9 changes: 8 additions & 1 deletion postgres-appliance/scripts/configure_spilo.py
@@ -583,6 +583,7 @@ def get_placeholders(provider):
     placeholders.setdefault('LOG_SHIP_SCHEDULE', '1 0 * * *')
     placeholders.setdefault('LOG_S3_BUCKET', '')
     placeholders.setdefault('LOG_S3_ENDPOINT', '')
+    placeholders.setdefault('LOG_S3_TAGS', '{}')
     placeholders.setdefault('LOG_TMPDIR', os.path.abspath(os.path.join(placeholders['PGROOT'], '../tmp')))
     placeholders.setdefault('LOG_BUCKET_SCOPE_SUFFIX', '')
@@ -771,7 +772,13 @@ def write_log_environment(placeholders):
     if not os.path.exists(log_env['LOG_ENV_DIR']):
         os.makedirs(log_env['LOG_ENV_DIR'])

-    for var in ('LOG_TMPDIR', 'LOG_AWS_REGION', 'LOG_S3_ENDPOINT', 'LOG_S3_KEY', 'LOG_S3_BUCKET', 'PGLOG'):
+    for var in ('LOG_TMPDIR',
+                'LOG_AWS_REGION',
+                'LOG_S3_ENDPOINT',
+                'LOG_S3_KEY',
+                'LOG_S3_BUCKET',
+                'LOG_S3_TAGS',
+                'PGLOG'):
         write_file(log_env[var], os.path.join(log_env['LOG_ENV_DIR'], var), True)


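write_log_environment() persists each variable as one file per name under LOG_ENV_DIR, so the log-shipping cron job can rebuild its environment independently of the bootstrap process. A sketch of how a consumer might read the directory back (illustrative path; the actual shipping script's mechanics may differ):

```python
import os

# Illustrative location; LOG_ENV_DIR is configurable
log_env_dir = '/run/etc/log.d/env'

# Each file is named after a variable and contains only its value
env = {}
for name in os.listdir(log_env_dir):
    with open(os.path.join(log_env_dir, name)) as f:
        env[name] = f.read().strip()

print(env.get('LOG_S3_TAGS'))  # e.g. '{"ClusterName": "SCOPE", "Namespace": "POD_NAMESPACE"}'
```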
4 changes: 3 additions & 1 deletion postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -53,9 +53,11 @@ def upload_to_s3(local_file_path):

     chunk_size = 52428800  # 50 MiB
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
+    tags = eval(os.getenv('LOG_S3_TAGS'))
+    s3_tags_str = "&".join(f"{key}={os.getenv(value)}" for key, value in tags.items())

     try:
-        bucket.upload_file(local_file_path, key_name, Config=config)
+        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs={'Tagging': s3_tags_str})
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)
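Two notes on the new upload path: boto3's ExtraArgs={'Tagging': ...} expects a URL-encoded query string of key=value pairs, and eval() executes arbitrary Python found in the environment (and fails with TypeError when LOG_S3_TAGS is unset, since os.getenv returns None). A safer equivalent as a sketch, with json.loads swapped in for eval and both sides URL-encoded; this is not what the commit ships:

```python
import json
import os
from urllib.parse import quote


def build_s3_tagging():
    """Build the boto3 'Tagging' string from LOG_S3_TAGS without eval()."""
    raw = os.getenv('LOG_S3_TAGS', '{}')  # default avoids parsing None
    tags = json.loads(raw)                # strict JSON instead of eval
    return "&".join(
        f"{quote(key)}={quote(os.getenv(value, ''))}"  # URL-encode keys and values
        for key, value in tags.items()
    )
```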
