Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions airflow/providers/docker/credentials/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
78 changes: 78 additions & 0 deletions airflow/providers/docker/credentials/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from dataclasses import dataclass
from typing import TYPE_CHECKING, Optional, Sequence

from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.log.secrets_masker import mask_secret

if TYPE_CHECKING:
from airflow.models import Connection


@dataclass
class DockerLoginCredentials:
    """Class for keeping authentication information for Docker Registry."""

    username: str
    password: str
    registry: str
    email: Optional[str] = None
    reauth: bool = False

    def __post_init__(self):
        # Register a non-empty password with Airflow's secrets masker so it
        # cannot leak into task logs.
        if self.password:
            mask_secret(self.password)


class BaseDockerCredentialHelper(LoggingMixin):
    """Base class for authentication in Docker Registry.

    :param conn: Reference to Docker hook connection object.
    """

    def __init__(self, *, conn: "Connection", **kwargs):
        super().__init__()
        self.conn = conn
        self.conn_extra = conn.extra_dejson

    @property
    def reauth(self) -> bool:
        """The reauth property from connection."""
        raw = self.conn_extra.get("reauth", True)
        if isinstance(raw, bool):
            return raw
        # Accept the usual boolean-like spellings, case-insensitively.
        val = str(raw).lower()
        if val in {'y', 'yes', 't', 'true', 'on', '1'}:
            return True
        if val in {'n', 'no', 'f', 'false', 'off', '0'}:
            return False
        raise ValueError(f"{val!r} is not a boolean-like string value.")

    @property
    def email(self) -> Optional[str]:
        """The email for the registry account from connection."""
        return self.conn_extra.get("email")

    def get_credentials(self) -> Optional[Sequence[DockerLoginCredentials]]:
        """
        Method uses for return credentials to Docker Registry.
        It might or might not use credentials from Connection.
        """
        raise NotImplementedError()
48 changes: 48 additions & 0 deletions airflow/providers/docker/credentials/connection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from typing import Optional, Sequence

from airflow.exceptions import AirflowException
from airflow.providers.docker.credentials.base import BaseDockerCredentialHelper, DockerLoginCredentials


class AirflowConnectionDockerCredentialHelper(BaseDockerCredentialHelper):
    """Default helper for authentication in Docker Registry.
    Use Airflow Connection information.
    """

    def get_credentials(self) -> Optional[Sequence[DockerLoginCredentials]]:
        if not self.conn.host:
            raise AirflowException('No Docker URL provided.')
        if not self.conn.login:
            raise AirflowException('No username provided.')

        # Append the port to the registry host only when one is configured.
        registry = f"{self.conn.host}:{self.conn.port}" if self.conn.port else self.conn.host

        credentials = DockerLoginCredentials(
            username=self.conn.login,
            password=self.conn.password,
            registry=registry,
            email=self.email,
            reauth=self.reauth,
        )
        return [credentials]
89 changes: 89 additions & 0 deletions airflow/providers/docker/credentials/ecr.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import base64
from typing import List, Optional, Sequence, Union

from airflow.exceptions import AirflowException
from airflow.providers.docker.credentials.base import BaseDockerCredentialHelper, DockerLoginCredentials


class EcrDockerCredentialHelper(BaseDockerCredentialHelper):
    """
    Authenticate into Amazon ECR (Elastic Container Registry).

    This helper ignores all connection information such as password and registry host
    and instead uses credentials retrieved from the AWS API.

    .. seealso::
        - `ECR Registry Auth <https://docs.aws.amazon.com/AmazonECR/latest/userguide/registry_auth.html>`_
        - :ref:`howto/connection:aws:configuring-the-connection`

    :param aws_conn_id: The Airflow connection used for AWS credentials.
        If this is ``None`` or empty then the default boto3 behaviour is used.
    :param region_name: AWS Region Name.
    :param registry_ids: A list of Amazon Web Services account IDs that are associated with the registries
        for which to get AuthorizationData objects.
        If you do not specify a registry, the default registry is assumed.
    """

    def __init__(
        self,
        *,
        aws_conn_id: Optional[str] = "aws_default",
        region_name: Optional[str] = None,
        registry_ids: Optional[Union[str, List[str]]] = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.aws_conn_id = aws_conn_id
        self.region_name = region_name
        # Normalise a single registry id into a one-element list.
        if isinstance(registry_ids, str):
            registry_ids = [registry_ids]
        self.registry_ids = registry_ids

    def get_credentials(self) -> Optional[Sequence[DockerLoginCredentials]]:
        """Return one :class:`DockerLoginCredentials` per ECR registry.

        :raises AirflowException: If the Amazon provider package is not installed.
        """
        try:
            from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
        except ImportError:
            raise AirflowException(
                "apache-airflow-providers-amazon not installed, run: "
                "pip install 'apache-airflow-providers-docker[amazon]'."
            )

        aws_hook = AwsBaseHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name, client_type="ecr")
        if self.registry_ids:
            response = aws_hook.conn.get_authorization_token(registryIds=self.registry_ids)
        else:
            response = aws_hook.conn.get_authorization_token()

        creds = []
        for auth_data in response["authorizationData"]:
            # The token decodes to "<username>:<password>"; split only on the
            # first colon so a password containing ':' is kept intact.
            username, password = (
                base64.b64decode(auth_data["authorizationToken"]).decode("utf-8").split(":", 1)
            )
            registry: str = auth_data['proxyEndpoint']
            creds.append(
                DockerLoginCredentials(
                    username=username,
                    password=password,
                    # https://github.com/docker/docker-py/issues/2256#issuecomment-824940506
                    registry=registry.replace("https://", ""),
                    reauth=True,
                )
            )
            # Log inside the loop so every registry's token expiry is reported,
            # not just the last one (and nothing dangles if the list is empty).
            self.log.info("Credentials to Amazon ECR %r expires at %s.", registry, auth_data['expiresAt'])

        return creds
96 changes: 58 additions & 38 deletions airflow/providers/docker/hooks/docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,23 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import json
from typing import Any, Dict, Optional

from docker import APIClient # type: ignore[attr-defined]
from docker.constants import DEFAULT_TIMEOUT_SECONDS # type: ignore[attr-defined]
from docker.errors import APIError # type: ignore[attr-defined]
from docker import APIClient
from docker.constants import DEFAULT_TIMEOUT_SECONDS
from docker.errors import APIError

from airflow.compat.functools import cached_property
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.providers.docker.credentials.base import BaseDockerCredentialHelper
from airflow.providers.docker.credentials.connection import AirflowConnectionDockerCredentialHelper
from airflow.utils.module_loading import import_string


class DockerHook(BaseHook, LoggingMixin):
class DockerHook(BaseHook):
"""
Interact with a Docker Daemon or Registry.

Expand All @@ -48,6 +53,16 @@ def get_ui_field_behaviour() -> Dict[str, Any]:
'host': 'Registry URL',
'login': 'Username',
},
"placeholders": {
'extra': json.dumps(
{
'reauth': False,
'email': 'Jane.Doe@example.org',
'credential_helper': 'dotted.path.to.credential.Helper',
'credential_helper_kwargs': {"foo": "bar"},
}
)
},
}

def __init__(
Expand All @@ -66,46 +81,51 @@ def __init__(

if not docker_conn_id:
raise AirflowException('No Docker connection id provided')

conn = self.get_connection(docker_conn_id)

if not conn.host:
raise AirflowException('No Docker URL provided')
if not conn.login:
raise AirflowException('No username provided')
extra_options = conn.extra_dejson

self.docker_conn_id = docker_conn_id
self.__base_url = base_url
self.__version = version
self.__tls = tls
self.__timeout = timeout
if conn.port:
self.__registry = f"{conn.host}:{conn.port}"
else:
self.__registry = conn.host
self.__username = conn.login
self.__password = conn.password
self.__email = extra_options.get('email')
self.__reauth = extra_options.get('reauth') != 'no'

def get_conn(self) -> APIClient:
@cached_property
def api_client(self) -> APIClient:
"""Create connection to docker host and login to the docker registries. (cached)"""
conn = self.get_connection(self.docker_conn_id)
client = APIClient(
base_url=self.__base_url, version=self.__version, tls=self.__tls, timeout=self.__timeout
)
self.__login(client)

credential_helper = conn.extra_dejson.get("credential_helper")
if not credential_helper:
# If not specified credential helper than retrieve information from Connection.
credential_helper = AirflowConnectionDockerCredentialHelper
credential_helper_kwargs = {}
else:
credential_helper = import_string(credential_helper)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It is not secure. We should not load user-defined classes as this makes our application vulnerable to CWE-502: Deserialization of Untrusted Data weakness.

We should change the logic so that it is not needed as @poituk suggest, or add a list of allowed classes as is done during DAG deserialization. See:

if _operator_link_class_path in get_operator_extra_links():
single_op_link_class = import_string(_operator_link_class_path)

Copy link
Member

@mik-laj mik-laj Sep 5, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As far as I know also Secrets Backends, REST API auth_backends and Amazon Session Factory also load by import_string method without any validation.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also personally I add import retry handlers to SlackHook by #25852
If it security issue it could be removed. No slack provider released since this feature added.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Secrets Backends, REST API auth_backends

In this case, the values are not controlled by the user (e.g. via the Web UI), but by the administrator. You must be able to change the files on the disk to change the secret backend or auth backend.

The problem occurs when the value of the import_string parameter is controlled remotely, e.g. it is read from the database.

In the case of Slack, we should fix it so that only allowed/safe values can be imported.

Copy link
Member

@mik-laj mik-laj Sep 6, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is no need for the class to be specially downloaded, as the class/function may already exist in any packages installed in the system, but we can misuse it.
I will have a hard time giving an example for Python now, but here you can see what an attack looks like in Java.
https://wololo.net/2022/06/11/ps5-ps4-hacker-theflow-discloses-blu-ray-disc-exploit-toolchain-ps5-piracy-not-a-matter-of-if-but-when/

The class com.sony.gemstack.org.dvb.user.UserPreferenceManagerImpl deserializes the userprefs file under privileged context using readObject() which is insecure
The class com.oracle.security.Service contains a method newInstance which calls Class.forName on an arbitrary class name. This allows arbitrary classes, even restricted ones (for example in sun.), to be instantiated.
The class com.sony.gemstack.org.dvb.io.ixc.IxcProxy contains the protected method invokeMethod which can call methods under privileged context. Permission checks in methods can be bypassed

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In this case, we would have to look for a class or function or method that takes a conn argument or ignores unknown key arguments in order to be able to exploit that class, but it can be any class in any package, so we have quite a lot of potential attack vector.

Copy link
Contributor Author

@Taragolis Taragolis Sep 6, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think the most likely attack vector is the fact that both import_string and a plain import load the module first.

Some sample

# airflow.providers.exploit.hooks.some_db_api

def unsafe_code():
    """Grab fernet key from configs, nudes, decode, send to someone and post on Twitter."""
    ...

unsafe_code()

class SomeDbApiHook(DbApiHook):
    conn_type = "awesome_conn_type"
    ...

So it would be the same if user call

  1. from airflow.providers.exploit.hooks.some_db_api import SomeDbApiHook
  2. import_string("airflow.providers.exploit.hooks.some_db_api.SomeDbApiHook")
  3. Or use airflow.providers.slack.transfers.sql_to_slack.SqlToSlackOperator in Airflow < 2.3 with connection type referenced to awesome_conn_type

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

DAG File is considered trusted content as it is recommended that it passes through code review. Changing the value in the database does not require code review, so there is a risk.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

conn_id also part of the DAG, so it should pass the review as well as all components which DAG expected to use.

Let's imagine that someone change connection after DAG deployed (some one with Admin or Op role) that could be mainly by two reason

  1. Someone with access to change connections compromised their account and "bad guy" also have access to Airflow which mostly intend to live in private subnets with access by proxy, bastion/jump hosts and VPNs.
  2. Someone with Admin or Op is "bad guy/imposer"

If some kind of exploit already in environment there is no problem to use Option 2 or might be Option 3 - it also uses import_string for DB Api Hooks.

But also it a lot of Operator expected to run template especially BashOperator might not be safe if templates stored in Airflow Variables. 'bash_command' + 'env' = run whatever you want.

One mitigating factor of all the above is that it requires many steps, and a lot of things have to happen first: docker installed, BashOperator used with Variables, Airflow credentials leaked, Airflow webserver exposed to the Internet.

Don't get me wrong I've also think that too much security Is Never Enough.

But I believe that more chance that user expose somehow cloud credentials which use by Airflow (a lot of chances with this creds has access to almost everything): e.g. some cred info stored in extra field in Connection.

Or also almost all providers which grant ability to send messages e.g.: Slack, Discord, Telegram and MS Teams (just a PR) has ability to get token/credentials directly in operator code (without connections) and do not mask it as secrets (actually it doesn't required because user will store directly in DAG code). A lot of chance that it expose in logs especially if it uses HttpHook. Some "bad guy" could easily use this creds for send messages in corporate messenger - e.g. nice link to change domain user/password.

credential_helper_kwargs = conn.extra_dejson.get("credential_helper_kwargs", {})

if not issubclass(credential_helper, BaseDockerCredentialHelper):
raise TypeError(
f"Your credential_helper `{credential_helper.__name__}` is not a subclass "
f"of `{BaseDockerCredentialHelper.__name__}`."
)

for creds in credential_helper(conn=conn, **credential_helper_kwargs).get_credentials() or []:
try:
self.log.info('Login into Docker Registry: %s', creds.registry)
client.login(
username=creds.username,
password=creds.password,
registry=creds.registry,
email=creds.email,
reauth=creds.reauth,
)
self.log.debug('Login successful')
except APIError as docker_error:
self.log.error('Docker login failed: %s', str(docker_error))
raise AirflowException(f'Docker login failed: {docker_error}')
return client

def __login(self, client) -> None:
self.log.debug('Logging into Docker')
try:
client.login(
username=self.__username,
password=self.__password,
registry=self.__registry,
email=self.__email,
reauth=self.__reauth,
)
self.log.debug('Login successful')
except APIError as docker_error:
self.log.error('Docker login failed: %s', str(docker_error))
raise AirflowException(f'Docker login failed: {docker_error}')
def get_conn(self) -> APIClient:
    """Return the cached Docker API client.

    Thin backwards-compatible wrapper around :attr:`api_client`, which
    creates the connection to the Docker host and performs the registry
    login(s) on first access.
    """
    return self.api_client
1 change: 1 addition & 0 deletions docs/spelling_wordlist.txt
Original file line number Diff line number Diff line change
Expand Up @@ -1147,6 +1147,7 @@ ReadOnlyCredentials
readthedocs
Realtime
realtime
reauth
rebase
Rebasing
Rebrand
Expand Down
4 changes: 3 additions & 1 deletion generated/provider_dependencies.json
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,9 @@
"apache-airflow>=2.2.0",
"docker>=5.0.3"
],
"cross-providers-deps": []
"cross-providers-deps": [
"amazon"
]
},
"elasticsearch": {
"deps": [
Expand Down
Loading