Support all Trino Security Access Control modes in Delta Lake Connector
mdesmet authored and kokosing committed Sep 6, 2022
1 parent 4852d25 commit 95849b0
Showing 10 changed files with 237 additions and 121 deletions.
31 changes: 31 additions & 0 deletions docs/src/main/sphinx/connector/delta-lake.rst
@@ -237,6 +237,37 @@ configure processing of Parquet files.
* - ``parquet_writer_page_size``
- The maximum page size created by the Parquet writer.

.. _delta-lake-authorization:

Authorization checks
^^^^^^^^^^^^^^^^^^^^

You can enable authorization checks for the connector by setting
the ``delta.security`` property in the catalog properties file. This
property must be one of the following values:

.. list-table:: Delta Lake security values
:widths: 30, 60
:header-rows: 1

* - Property value
- Description
* - ``ALLOW_ALL`` (default value)
- No authorization checks are enforced.
* - ``SYSTEM``
- The connector relies on system-level access control.
* - ``READ_ONLY``
- Operations that read data or metadata, such as :doc:`/sql/select`, are
permitted. No operations that write data or metadata, such as
:doc:`/sql/create-table`, :doc:`/sql/insert`, or :doc:`/sql/delete`, are
allowed.
* - ``FILE``
- Authorization checks are enforced using a catalog-level access control
configuration file whose path is specified in the ``security.config-file``
catalog configuration property. See
:ref:`catalog-file-based-access-control` for information on the
authorization configuration file.
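
For example, a catalog properties file that enables file-based authorization
could contain the following settings (the path to the rules file is only an
illustration):

.. code-block:: properties

    delta.security=FILE
    security.config-file=etc/catalog/delta-lake-rules.json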

.. _delta-lake-type-mapping:

Type mapping

This file was deleted.

@@ -56,11 +56,9 @@
import io.trino.plugin.hive.TableAlreadyExistsException;
import io.trino.plugin.hive.metastore.Column;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HivePrincipal;
import io.trino.plugin.hive.metastore.PrincipalPrivileges;
import io.trino.plugin.hive.metastore.StorageFormat;
import io.trino.plugin.hive.metastore.Table;
import io.trino.plugin.hive.security.AccessControlMetadata;
import io.trino.spi.NodeManager;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
@@ -98,9 +96,6 @@
import io.trino.spi.expression.Variable;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.GrantInfo;
import io.trino.spi.security.Privilege;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.security.TrinoPrincipal;
import io.trino.spi.statistics.ColumnStatisticMetadata;
import io.trino.spi.statistics.ColumnStatisticType;
@@ -273,7 +268,6 @@ public class DeltaLakeMetadata
private final TrinoFileSystemFactory fileSystemFactory;
private final HdfsEnvironment hdfsEnvironment;
private final TypeManager typeManager;
private final AccessControlMetadata accessControlMetadata;
private final CheckpointWriterManager checkpointWriterManager;
private final long defaultCheckpointInterval;
private final boolean ignoreCheckpointWriteFailures;
@@ -297,7 +291,6 @@ public DeltaLakeMetadata(
TrinoFileSystemFactory fileSystemFactory,
HdfsEnvironment hdfsEnvironment,
TypeManager typeManager,
AccessControlMetadata accessControlMetadata,
int domainCompactionThreshold,
boolean unsafeWritesEnabled,
JsonCodec<DataFileInfo> dataFileInfoCodec,
Expand All @@ -318,7 +311,6 @@ public DeltaLakeMetadata(
this.fileSystemFactory = requireNonNull(fileSystemFactory, "fileSystemFactory is null");
this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
this.typeManager = requireNonNull(typeManager, "typeManager is null");
this.accessControlMetadata = requireNonNull(accessControlMetadata, "accessControlMetadata is null");
this.domainCompactionThreshold = domainCompactionThreshold;
this.unsafeWritesEnabled = unsafeWritesEnabled;
this.dataFileInfoCodec = requireNonNull(dataFileInfoCodec, "dataFileInfoCodec is null");
@@ -2008,83 +2000,6 @@ public Map<String, Object> getSchemaProperties(ConnectorSession session, Catalog
return db.map(DeltaLakeSchemaProperties::fromDatabase).orElseThrow(() -> new SchemaNotFoundException(schema));
}

@Override
public void createRole(ConnectorSession session, String role, Optional<TrinoPrincipal> grantor)
{
accessControlMetadata.createRole(session, role, grantor.map(HivePrincipal::from));
}

@Override
public void dropRole(ConnectorSession session, String role)
{
accessControlMetadata.dropRole(session, role);
}

@Override
public Set<String> listRoles(ConnectorSession session)
{
return accessControlMetadata.listRoles(session);
}

@Override
public Set<RoleGrant> listRoleGrants(ConnectorSession session, TrinoPrincipal principal)
{
return ImmutableSet.copyOf(accessControlMetadata.listRoleGrants(session, HivePrincipal.from(principal)));
}

@Override
public void grantRoles(ConnectorSession session, Set<String> roles, Set<TrinoPrincipal> grantees, boolean withAdminOption, Optional<TrinoPrincipal> grantor)
{
accessControlMetadata.grantRoles(session, roles, HivePrincipal.from(grantees), withAdminOption, grantor.map(HivePrincipal::from));
}

@Override
public void revokeRoles(ConnectorSession session, Set<String> roles, Set<TrinoPrincipal> grantees, boolean adminOptionFor, Optional<TrinoPrincipal> grantor)
{
accessControlMetadata.revokeRoles(session, roles, HivePrincipal.from(grantees), adminOptionFor, grantor.map(HivePrincipal::from));
}

@Override
public Set<RoleGrant> listApplicableRoles(ConnectorSession session, TrinoPrincipal principal)
{
return accessControlMetadata.listApplicableRoles(session, HivePrincipal.from(principal));
}

@Override
public Set<String> listEnabledRoles(ConnectorSession session)
{
return accessControlMetadata.listEnabledRoles(session);
}

@Override
public void grantTablePrivileges(ConnectorSession session, SchemaTableName schemaTableName, Set<Privilege> privileges, TrinoPrincipal grantee, boolean grantOption)
{
accessControlMetadata.grantTablePrivileges(session, schemaTableName, privileges, HivePrincipal.from(grantee), grantOption);
}

@Override
public void revokeTablePrivileges(ConnectorSession session, SchemaTableName schemaTableName, Set<Privilege> privileges, TrinoPrincipal grantee, boolean grantOption)
{
accessControlMetadata.revokeTablePrivileges(session, schemaTableName, privileges, HivePrincipal.from(grantee), grantOption);
}

@Override
public List<GrantInfo> listTablePrivileges(ConnectorSession session, SchemaTablePrefix schemaTablePrefix)
{
return accessControlMetadata.listTablePrivileges(session, listTables(session, schemaTablePrefix));
}

private List<SchemaTableName> listTables(ConnectorSession session, SchemaTablePrefix prefix)
{
if (prefix.getTable().isEmpty()) {
return listTables(session, prefix.getSchema());
}
SchemaTableName tableName = prefix.toSchemaTableName();
return metastore.getTable(tableName.getSchemaName(), tableName.getTableName())
.map(table -> ImmutableList.of(tableName))
.orElse(ImmutableList.of());
}

private void setRollback(Runnable action)
{
checkState(rollbackAction.compareAndSet(null, action), "rollback action is already set");
@@ -42,7 +42,6 @@ public class DeltaLakeMetadataFactory
private final HdfsEnvironment hdfsEnvironment;
private final TransactionLogAccess transactionLogAccess;
private final TypeManager typeManager;
private final DeltaLakeAccessControlMetadataFactory accessControlMetadataFactory;
private final JsonCodec<DataFileInfo> dataFileInfoCodec;
private final JsonCodec<DeltaLakeUpdateResult> updateResultJsonCodec;
private final JsonCodec<DeltaLakeMergeResult> mergeResultJsonCodec;
@@ -68,7 +67,6 @@ public DeltaLakeMetadataFactory(
HdfsEnvironment hdfsEnvironment,
TransactionLogAccess transactionLogAccess,
TypeManager typeManager,
DeltaLakeAccessControlMetadataFactory accessControlMetadataFactory,
DeltaLakeConfig deltaLakeConfig,
JsonCodec<DataFileInfo> dataFileInfoCodec,
JsonCodec<DeltaLakeUpdateResult> updateResultJsonCodec,
@@ -85,7 +83,6 @@
this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
this.transactionLogAccess = requireNonNull(transactionLogAccess, "transactionLogAccess is null");
this.typeManager = requireNonNull(typeManager, "typeManager is null");
this.accessControlMetadataFactory = requireNonNull(accessControlMetadataFactory, "accessControlMetadataFactory is null");
this.dataFileInfoCodec = requireNonNull(dataFileInfoCodec, "dataFileInfoCodec is null");
this.updateResultJsonCodec = requireNonNull(updateResultJsonCodec, "updateResultJsonCodec is null");
this.mergeResultJsonCodec = requireNonNull(mergeResultJsonCodec, "mergeResultJsonCodec is null");
@@ -123,7 +120,6 @@ public DeltaLakeMetadata create(ConnectorIdentity identity)
fileSystemFactory,
hdfsEnvironment,
typeManager,
accessControlMetadataFactory.create(cachingHiveMetastore),
domainCompactionThreshold,
unsafeWritesEnabled,
dataFileInfoCodec,
@@ -96,8 +96,7 @@ public void setup(Binder binder)
configBinder(binder).bindConfigDefaults(ParquetWriterConfig.class, config -> config.setParquetOptimizedWriterEnabled(true));

install(new ConnectorAccessControlModule());
newOptionalBinder(binder, DeltaLakeAccessControlMetadataFactory.class)
.setDefault().toInstance(DeltaLakeAccessControlMetadataFactory.SYSTEM);
configBinder(binder).bindConfig(DeltaLakeSecurityConfig.class);

Multibinder<SystemTableProvider> systemTableProviders = newSetBinder(binder, SystemTableProvider.class);
systemTableProviders.addBinding().to(PropertiesSystemTableProvider.class).in(Scopes.SINGLETON);
@@ -0,0 +1,47 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.deltalake;

import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;

import javax.validation.constraints.NotNull;

import static io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity.ALLOW_ALL;

public class DeltaLakeSecurityConfig
{
public enum DeltaLakeSecurity
{
ALLOW_ALL,
READ_ONLY,
SYSTEM,
FILE,
}

private DeltaLakeSecurity securitySystem = ALLOW_ALL;

@NotNull
public DeltaLakeSecurity getSecuritySystem()
{
return securitySystem;
}

@Config("delta.security")
@ConfigDescription("Authorization checks for Delta Lake connector")
public DeltaLakeSecurityConfig setSecuritySystem(DeltaLakeSecurity securitySystem)
{
this.securitySystem = securitySystem;
return this;
}
}
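
An illustrative sketch (not part of this commit) of how such a config class is typically exercised in Trino, assuming airlift's ConfigAssertions test helpers; the test class name and TestNG usage here are assumptions:

package io.trino.plugin.deltalake;

import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Test;

import java.util.Map;

import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping;
import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults;
import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults;
import static io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity.ALLOW_ALL;
import static io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity.READ_ONLY;

// Hypothetical test class, shown only to illustrate how delta.security binds
public class TestDeltaLakeSecurityConfig
{
    @Test
    public void testDefaults()
    {
        // ALLOW_ALL is the declared default of the delta.security property
        assertRecordedDefaults(recordDefaults(DeltaLakeSecurityConfig.class)
                .setSecuritySystem(ALLOW_ALL));
    }

    @Test
    public void testExplicitPropertyMapping()
    {
        // delta.security=READ_ONLY maps onto the READ_ONLY enum value
        Map<String, String> properties = ImmutableMap.of("delta.security", "READ_ONLY");
        assertFullMapping(properties, new DeltaLakeSecurityConfig().setSecuritySystem(READ_ONLY));
    }
}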
@@ -0,0 +1,50 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.deltalake;

import com.google.inject.Binder;
import com.google.inject.Module;
import io.airlift.configuration.AbstractConfigurationAwareModule;
import io.trino.plugin.base.security.ConnectorAccessControlModule;
import io.trino.plugin.base.security.FileBasedAccessControlModule;
import io.trino.plugin.base.security.ReadOnlySecurityModule;
import io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity;
import io.trino.plugin.hive.security.AllowAllSecurityModule;

import static io.airlift.configuration.ConditionalModule.conditionalModule;
import static io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity.ALLOW_ALL;
import static io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity.FILE;
import static io.trino.plugin.deltalake.DeltaLakeSecurityConfig.DeltaLakeSecurity.READ_ONLY;

public class DeltaLakeSecurityModule
extends AbstractConfigurationAwareModule
{
@Override
protected void setup(Binder binder)
{
install(new ConnectorAccessControlModule());
bindSecurityModule(ALLOW_ALL, new AllowAllSecurityModule());
bindSecurityModule(READ_ONLY, new ReadOnlySecurityModule());
bindSecurityModule(FILE, new FileBasedAccessControlModule());
// SYSTEM: do not bind a ConnectorAccessControl so the engine will use system security with system roles
}

private void bindSecurityModule(DeltaLakeSecurity deltaLakeSecurity, Module module)
{
install(conditionalModule(
DeltaLakeSecurityConfig.class,
security -> deltaLakeSecurity == security.getSecuritySystem(),
module));
}
}
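
The conditional bindings above install exactly one security module per catalog: airlift's conditionalModule applies the given module only when its predicate over the bound DeltaLakeSecurityConfig matches, and the SYSTEM mode intentionally installs nothing so that authorization falls back to the engine's system-level access control.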
@@ -91,6 +91,7 @@ public static Connector createConnector(
new CatalogNameModule(catalogName),
new DeltaLakeMetastoreModule(),
new DeltaLakeModule(),
new DeltaLakeSecurityModule(),
binder -> {
binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion()));
binder.bind(NodeManager.class).toInstance(context.getNodeManager());