Skip to content

Commit 5103747

Browse files
committed
Update script to generate planet dump and pg client
1 parent 156a40e commit 5103747

File tree

3 files changed: 71 additions (+), 14 deletions (−)

images/full-history/start.sh

Lines changed: 31 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -32,30 +32,55 @@ fi
3232
# Download the database .dump file referenced by DUMP_CLOUD_URL into $dumpFile,
# transparently decompressing it when the object is gzip-compressed.
#
# Globals (read):  CLOUDPROVIDER  - "aws" or "gcp"
#                  DUMP_CLOUD_URL - URL of the .dump, or (AWS only) a .txt
#                                   object whose first line is the real dump URL
#                  VOLUME_DIR     - scratch directory for the intermediate .txt
# Globals (read):  dumpFile       - destination path for the final dump
# Outputs:  progress messages on stdout, errors on stderr
# Exits:    non-zero when a download or decompression step fails
download_dump_file() {
  echo "Downloading db .dump file from cloud..."

  local temp_dump_file="$dumpFile.tmp"
  local actual_dump_url=""
  local temp_txt=""

  if [ "$CLOUDPROVIDER" == "aws" ]; then
    if [[ "$DUMP_CLOUD_URL" == *.txt ]]; then
      # Indirect mode: the .txt object's first line holds the real dump URL.
      temp_txt="$VOLUME_DIR/tmp_dump_url.txt"
      aws s3 cp "$DUMP_CLOUD_URL" "$temp_txt" || {
        echo "Failed to download URL list: $DUMP_CLOUD_URL" >&2
        exit 1
      }

      # Get the first line (S3 URL to the .dump file)
      actual_dump_url=$(head -n 1 "$temp_txt")
      echo "Found dump URL in txt: $actual_dump_url"

      aws s3 cp "$actual_dump_url" "$temp_dump_file" || {
        echo "Failed to download dump: $actual_dump_url" >&2
        rm -f "$temp_txt"
        exit 1
      }
      rm -f "$temp_txt"
    else
      actual_dump_url="$DUMP_CLOUD_URL"
      aws s3 cp "$DUMP_CLOUD_URL" "$temp_dump_file" || {
        echo "Failed to download dump: $DUMP_CLOUD_URL" >&2
        exit 1
      }
    fi

  elif [ "$CLOUDPROVIDER" == "gcp" ]; then
    actual_dump_url="$DUMP_CLOUD_URL"
    gsutil cp "$DUMP_CLOUD_URL" "$temp_dump_file" || {
      echo "Failed to download dump: $DUMP_CLOUD_URL" >&2
      exit 1
    }
  else
    echo "Unsupported CLOUDPROVIDER: $CLOUDPROVIDER"
    exit 1
  fi

  # Check if the downloaded file is gzip compressed: by URL extension, by
  # file(1) output when available, or by the gzip magic bytes (1f 8b).
  # NOTE: the previous version also tested "$temp_dump_file" == *.gz, which
  # could never match because the temp file always ends in ".tmp"; dropped.
  local is_gzip=false
  if [[ "$actual_dump_url" == *.gz ]]; then
    is_gzip=true
  elif command -v file >/dev/null 2>&1 && file "$temp_dump_file" 2>/dev/null | grep -q "gzip compressed"; then
    is_gzip=true
  elif head -c 2 "$temp_dump_file" 2>/dev/null | od -An -tx1 | grep -q "1f 8b"; then
    is_gzip=true
  fi

  if [ "$is_gzip" = true ]; then
    echo "Detected gzip compressed dump file, decompressing..."
    gunzip -c "$temp_dump_file" > "$dumpFile" || {
      echo "Failed to decompress $temp_dump_file" >&2
      exit 1
    }
    rm -f "$temp_dump_file"
  else
    mv "$temp_dump_file" "$dumpFile"
  fi

  echo "Dump file ready at: $dumpFile (PostgreSQL 17 compatible)"
}
6085

6186
# ===============================

images/osm-processor/Dockerfile

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,20 +32,27 @@ ENV workdir /mnt/data
3232
WORKDIR $workdir
3333

3434
RUN set -ex \
35+
&& apt-get update \
36+
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
37+
ca-certificates \
38+
curl \
39+
gnupg \
40+
lsb-release \
41+
&& echo "deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main" > /etc/apt/sources.list.d/pgdg.list \
42+
&& curl -L https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
3543
&& apt-get update \
3644
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
3745
osmosis \
3846
osmium-tool \
3947
awscli \
4048
gsutil \
4149
azure-cli \
42-
postgresql-client \
50+
postgresql-client-17 \
4351
rsync \
4452
pyosmium \
4553
tmux \
4654
zsh \
4755
git \
48-
curl \
4956
libxml2 \
5057
libboost-filesystem1.74.0 \
5158
libboost-program-options1.74.0 \

images/planet-dump/start.sh

Lines changed: 31 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -30,30 +30,55 @@ fi
3030
# Download the database .dump file referenced by DUMP_CLOUD_URL into $dumpFile,
# transparently decompressing it when the object is gzip-compressed.
#
# Globals (read):  CLOUDPROVIDER  - "aws" or "gcp"
#                  DUMP_CLOUD_URL - URL of the .dump, or (AWS only) a .txt
#                                   object whose first line is the real dump URL
#                  VOLUME_DIR     - scratch directory for the intermediate .txt
# Globals (read):  dumpFile       - destination path for the final dump
# Outputs:  progress messages on stdout, errors on stderr
# Exits:    non-zero when a download or decompression step fails
download_dump_file() {
  echo "Downloading db .dump file from cloud..."

  local temp_dump_file="$dumpFile.tmp"
  local actual_dump_url=""
  local temp_txt=""

  if [ "$CLOUDPROVIDER" == "aws" ]; then
    if [[ "$DUMP_CLOUD_URL" == *.txt ]]; then
      # Indirect mode: the .txt object's first line holds the real dump URL.
      temp_txt="$VOLUME_DIR/tmp_dump_url.txt"
      aws s3 cp "$DUMP_CLOUD_URL" "$temp_txt" || {
        echo "Failed to download URL list: $DUMP_CLOUD_URL" >&2
        exit 1
      }

      # Get the first line (S3 URL to the .dump file)
      actual_dump_url=$(head -n 1 "$temp_txt")
      echo "Found dump URL in txt: $actual_dump_url"

      aws s3 cp "$actual_dump_url" "$temp_dump_file" || {
        echo "Failed to download dump: $actual_dump_url" >&2
        rm -f "$temp_txt"
        exit 1
      }
      rm -f "$temp_txt"
    else
      actual_dump_url="$DUMP_CLOUD_URL"
      aws s3 cp "$DUMP_CLOUD_URL" "$temp_dump_file" || {
        echo "Failed to download dump: $DUMP_CLOUD_URL" >&2
        exit 1
      }
    fi

  elif [ "$CLOUDPROVIDER" == "gcp" ]; then
    actual_dump_url="$DUMP_CLOUD_URL"
    gsutil cp "$DUMP_CLOUD_URL" "$temp_dump_file" || {
      echo "Failed to download dump: $DUMP_CLOUD_URL" >&2
      exit 1
    }
  else
    echo "Unsupported CLOUDPROVIDER: $CLOUDPROVIDER"
    exit 1
  fi

  # Check if the downloaded file is gzip compressed: by URL extension, by
  # file(1) output when available, or by the gzip magic bytes (1f 8b).
  # NOTE: the previous version also tested "$temp_dump_file" == *.gz, which
  # could never match because the temp file always ends in ".tmp"; dropped.
  local is_gzip=false
  if [[ "$actual_dump_url" == *.gz ]]; then
    is_gzip=true
  elif command -v file >/dev/null 2>&1 && file "$temp_dump_file" 2>/dev/null | grep -q "gzip compressed"; then
    is_gzip=true
  elif head -c 2 "$temp_dump_file" 2>/dev/null | od -An -tx1 | grep -q "1f 8b"; then
    is_gzip=true
  fi

  if [ "$is_gzip" = true ]; then
    echo "Detected gzip compressed dump file, decompressing..."
    gunzip -c "$temp_dump_file" > "$dumpFile" || {
      echo "Failed to decompress $temp_dump_file" >&2
      exit 1
    }
    rm -f "$temp_dump_file"
  else
    mv "$temp_dump_file" "$dumpFile"
  fi

  echo "Dump file ready at: $dumpFile (PostgreSQL 17 compatible)"
}
5883

5984

Comments (0)