Skip to content
This repository was archived by the owner on Aug 19, 2021. It is now read-only.

Commit db3f72e

Browse files
authored
DA-1889 Auto create backend bucket (#60)
1 parent 488c260 commit db3f72e

File tree

6 files changed

+79
-45
lines changed

6 files changed

+79
-45
lines changed

docs/source/commands.rst

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -122,21 +122,6 @@ the current operation. Acceptable values are: ``gcs`` or ``s3``.
122122
.. index::
123123
triple: worker; options; --backend
124124

125-
\\-\\-backend-bucket
126-
++++++++++++++++++++
127-
128-
The **\\-\\-backend-bucket** option specifies the name of the backend bucket that should
129-
be used to house the terraform state files.
130-
131-
.. warning::
132-
133-
**terraform-worker** does not create the backend bucket. Creation of this bucket is
134-
a prerequisite for running **terraform-worker** with either a ``gcs`` or
135-
``s3`` backend.
136-
137-
.. index::
138-
triple: worker; options; --backend-bucket
139-
140125
.. _backend-prefix:
141126

142127
\\-\\-backend-prefix

docs/source/quick-start.rst

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,8 +79,7 @@ Next, run the following from a \*nix shell session.
7979
8080
.. note::
8181
Be sure to replace ``<YOUR_AWS_PROFILE>``, ``<YOUR_BACKEND_BUCKET>``, and ``<YOUR_AWS_REGION>`` with the
82-
the appropriate values. Currently **terraform-worker** does not create the bucket. It will need to be
83-
created separately.
82+
appropriate values.
8483

8584
Once the operation is complete, the console should contain text similar to the following:
8685

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "terraform-worker"
3-
version = "0.10.3"
3+
version = "0.10.4"
44
description = "An orchestration tool for Terraform"
55
authors = ["Richard Maynard <richard.maynard@objectrocket.com>"]
66
packages = [

tests/conftest.py

Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -248,7 +248,7 @@ def rootc_options(s3_client, dynamodb_client, sts_client):
248248

249249

250250
@pytest.fixture
251-
def basec(rootc):
251+
def basec(rootc, s3_client):
252252
with mock.patch(
253253
"tfworker.commands.base.BaseCommand.get_terraform_version",
254254
side_effect=lambda x: (13, 3),
@@ -272,9 +272,12 @@ def gbasec(grootc):
272272
"tfworker.commands.base.which",
273273
side_effect=lambda x: "/usr/local/bin/terraform",
274274
):
275-
return tfworker.commands.base.BaseCommand(
276-
grootc, "test-0001", tf_version_major=13
277-
)
275+
with mock.patch(
276+
"tfworker.backends.gcs.storage.Client.from_service_account_json"
277+
):
278+
return tfworker.commands.base.BaseCommand(
279+
grootc, "test-0001", tf_version_major=13
280+
)
278281

279282

280283
@pytest.fixture
@@ -352,12 +355,15 @@ def tf_13cmd_options(rootc_options):
352355
"tfworker.commands.base.which",
353356
side_effect=lambda x: "/usr/local/bin/terraform",
354357
):
355-
return tfworker.commands.terraform.TerraformCommand(
356-
rootc_options,
357-
deployment="test-0001-options",
358-
tf_version=(13, 5),
359-
b64_encode=False,
360-
)
358+
with mock.patch(
359+
"tfworker.backends.gcs.storage.Client.from_service_account_json"
360+
):
361+
return tfworker.commands.terraform.TerraformCommand(
362+
rootc_options,
363+
deployment="test-0001-options",
364+
tf_version=(13, 5),
365+
b64_encode=False,
366+
)
361367

362368

363369
@pytest.fixture

tfworker/backends/gcs.py

Lines changed: 15 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
import click
1818
from google.api_core import page_iterator
1919
from google.cloud import storage
20+
from google.cloud.exceptions import Conflict
2021

2122
from .base import BackendError, BaseBackend, validate_backend_empty
2223

@@ -38,6 +39,20 @@ def __init__(self, authenticators, definitions, deployment=None):
3839
if not self._gcs_prefix.endswith("/"):
3940
self._gcs_prefix = f"{self._gcs_prefix}/"
4041

42+
if self._authenticator.creds_path:
43+
self._storage_client = storage.Client.from_service_account_json(
44+
self._authenticator.creds_path
45+
)
46+
else:
47+
self._storage_client = storage.Client(
48+
project=self._authenticator.project
49+
)
50+
51+
try:
52+
self._storage_client.create_bucket(self._gcs_bucket)
53+
except Conflict:
54+
pass
55+
4156
def _clean_deployment_limit(self, limit: tuple) -> None:
4257
""" only clean items within limit """
4358
full_state_list = self._get_state_list()
@@ -123,16 +138,6 @@ def clean(self, deployment: str, limit: tuple = None) -> None:
123138
if self._gcs_prefix is None or self._gcs_bucket is None:
124139
raise BackendError("clean attempted without proper authenticator setup")
125140

126-
if not hasattr(self, "_storage_client"):
127-
if self._authenticator.creds_path:
128-
self._storage_client = storage.Client.from_service_account_json(
129-
self._authenticator.creds_path
130-
)
131-
else:
132-
self._storage_client = storage.Client(
133-
project=self._authenticator.project
134-
)
135-
136141
# clean entire deployment
137142
if not limit:
138143
self._clean_prefix(self._gcs_prefix)

tfworker/backends/s3.py

Lines changed: 46 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from contextlib import closing
1717

1818
import boto3
19+
import botocore
1920
import click
2021

2122
from .base import BackendError, BaseBackend, validate_backend_empty
@@ -55,6 +56,48 @@ def __init__(self, authenticators, definitions, deployment=None):
5556
)
5657
self._create_table(locking_table_name)
5758

59+
# Initialize s3 client and create bucket if necessary. Op should create if
60+
# not exists
61+
self._s3_client = self._authenticator.backend_session.client("s3")
62+
try:
63+
self._s3_client.create_bucket(
64+
Bucket=self._authenticator.bucket,
65+
CreateBucketConfiguration={
66+
"LocationConstraint": self._authenticator.region
67+
},
68+
ACL="private",
69+
)
70+
except botocore.exceptions.ClientError as err:
71+
err_str = str(err)
72+
if "InvalidLocationConstraint" in err_str:
73+
click.secho(
74+
"InvalidLocationConstraint raised when trying to create a bucket. "
75+
"Verify the AWS Region passed to the worker matches the AWS region "
76+
"in the profile.",
77+
fg="red",
78+
)
79+
elif (
80+
"BucketAlreadyExists" not in err_str
81+
and "BucketAlreadyOwnedByYou" not in err_str
82+
):
83+
raise err
84+
85+
# Block public access
86+
self._s3_client.put_public_access_block(
87+
Bucket=self._authenticator.bucket,
88+
PublicAccessBlockConfiguration={
89+
"BlockPublicAcls": True,
90+
"IgnorePublicAcls": True,
91+
"BlockPublicPolicy": True,
92+
"RestrictPublicBuckets": True,
93+
},
94+
)
95+
96+
# Enable versioning on the bucket
97+
s3_resource = self._authenticator.backend_session.resource("s3")
98+
versioning = s3_resource.BucketVersioning(self._authenticator.bucket)
99+
versioning.enable()
100+
58101
def _check_table_exists(self, name: str) -> bool:
59102
""" check if a supplied dynamodb table exists """
60103
if name in self._ddb_client.list_tables()["TableNames"]:
@@ -69,10 +112,7 @@ def _clean_bucket_state(self, definition=None):
69112
optionally definition can be passed to limit the cleanup
70113
to a single definition
71114
"""
72-
s3_paginator = self._authenticator.backend_session.client("s3").get_paginator(
73-
"list_objects_v2"
74-
)
75-
s3_client = self._authenticator.backend_session.client("s3")
115+
s3_paginator = self._s3_client.get_paginator("list_objects_v2")
76116

77117
if definition is None:
78118
prefix = self._authenticator.prefix
@@ -82,7 +122,7 @@ def _clean_bucket_state(self, definition=None):
82122
for s3_object in self.filter_keys(
83123
s3_paginator, self._authenticator.bucket, prefix
84124
):
85-
backend_file = s3_client.get_object(
125+
backend_file = self._s3_client.get_object(
86126
Bucket=self._authenticator.bucket, Key=s3_object
87127
)
88128
body = backend_file["Body"]
@@ -149,8 +189,7 @@ def _delete_with_versions(self, key):
149189
note: in initial testing this isn't required, but is inconsistent with how S3 delete markers, and the boto
150190
delete object call work there may be some configurations that require extra handling.
151191
"""
152-
s3_client = self._authenticator.backend_session.client("s3")
153-
s3_client.delete_object(Bucket=self._authenticator.bucket, Key=key)
192+
self._s3_client.delete_object(Bucket=self._authenticator.bucket, Key=key)
154193

155194
def clean(self, deployment: str, limit: tuple = None) -> None:
156195
"""

0 commit comments

Comments
 (0)