From d3f26c63aa75c19876e5e9cee266996dda5f0eb6 Mon Sep 17 00:00:00 2001
From: Xun
Date: Mon, 26 Aug 2024 16:57:47 +0800
Subject: [PATCH] [#4662] improve(IT): Add ranger authorization Hive E2E test (#4677)

### What changes were proposed in this pull request?

1. Use an isolated class loader to create the Ranger authorization plugin.
2. Add a Hive E2E test that exercises Ranger authorization through Gravitino's RESTful access-control interface.
3. Copy the authorization-ranger jars to `distribution/package/authorizations/ranger/libs`.
4. Add `Apache Ranger` to LICENSE.bin.

### Why are the changes needed?

#4662

### Does this PR introduce _any_ user-facing change?

N/A

### How was this patch tested?

CI

---
 .../access-control-integration-test.yml       |  96 +++++++
 .../workflows/backend-integration-test.yml    |  73 ++++-
 LICENSE.bin                                   |   5 +-
 NOTICE.bin                                    |   3 +
 .../authorization-ranger/build.gradle.kts     |  25 +-
 .../integration/test/RangerHiveE2EIT.java     | 242 ++++++++++++++++
 .../ranger/integration/test/RangerHiveIT.java | 264 ++++++------------
 .../ranger/integration/test/RangerITEnv.java  | 216 +++++++++++++-
 build.gradle.kts                              |  19 +-
 .../jdbc/JdbcCatalogPropertiesMetadata.java   |   7 +-
 .../kafka/KafkaCatalogPropertiesMetadata.java |  18 +-
 .../gravitino/catalog/CatalogManager.java     |  42 ++-
 .../gravitino/connector/BaseCatalog.java      |  70 ++---
 .../authorization/TestAuthorization.java      |   7 +
 gradle/libs.versions.toml                     |   2 +
 .../test/container/MySQLContainer.java        |   2 +-
 16 files changed, 850 insertions(+), 241 deletions(-)
 create mode 100644 .github/workflows/access-control-integration-test.yml
 create mode 100644 authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java

diff --git a/.github/workflows/access-control-integration-test.yml b/.github/workflows/access-control-integration-test.yml
new file mode 100644
index 00000000000..9f7d5f2bdc4
--- /dev/null
+++ b/.github/workflows/access-control-integration-test.yml
@@ -0,0 +1,96 @@
+name: Access Control Integration Test
+
+# Controls when the workflow will run
+on:
+  # Triggers the workflow on push or pull request events but only for the "main" branch
+  push:
+    branches: [ "main", "branch-*" ]
+  pull_request:
+    branches: [ "main", "branch-*" ]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  changes:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dorny/paths-filter@v2
+        id: filter
+        with:
+          filters: |
+            source_changes:
+              - api/**
+              - authorizations/**
+              - catalogs/**
+              - clients/client-java/**
+              - clients/client-java-runtime/**
+              - common/**
+              - core/**
+              - integration-test-common/**
+              - server/**
+              - server-common/**
+    outputs:
+      source_changes: ${{ steps.filter.outputs.source_changes }}
+
+  # Integration test for AMD64 architecture
+  test-amd64-arch:
+    needs: changes
+    if: needs.changes.outputs.source_changes == 'true'
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    strategy:
+      matrix:
+        # Integration test for AMD64 architecture
+        architecture: [linux/amd64]
+        java-version: [ 17 ]
+        test-mode: [ embedded, deploy ]
+        include:
+          - test-mode: 'embedded'
+            backend: 'h2'
+          - test-mode: 'deploy'
+            backend: 'mysql'
+
+    env:
+      PLATFORM: ${{ matrix.architecture }}
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: actions/setup-java@v4
+        with:
+          java-version: ${{ matrix.java-version }}
+          distribution: 'temurin'
+          cache: 'gradle'
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Check required command
+        run: |
+          dev/ci/check_commands.sh
+
+      - name: Package Gravitino
+        if: ${{ matrix.test-mode == 'deploy' }}
+        run: |
+          ./gradlew compileDistribution -x test -PjdkVersion=${{ matrix.java-version }}
+
+      - name: Free up disk space
+        run: |
+          dev/ci/util_free_space.sh
+
+      - name: Authorization Integration Test (JDK${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }})
+        id: integrationTest
+        run: |
+          ./gradlew -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdbcBackend=${{ matrix.backend }} -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test --tests "org.apache.gravitino.authorization.ranger.integration.test.**"
+
+      - name: Upload integration test reports
+        uses: actions/upload-artifact@v3
+        if: ${{ (failure() && steps.integrationTest.outcome == 'failure') || contains(github.event.pull_request.labels.*.name, 'upload log') }}
+        with:
+          name: authorizations-integrate-test-reports-${{ matrix.java-version }}
+          path: |
+            build/reports
+            distribution/package/logs/gravitino-server.out
+            distribution/package/logs/gravitino-server.log
diff --git a/.github/workflows/backend-integration-test.yml b/.github/workflows/backend-integration-test.yml
index 2e2026aa379..87f6729ca31 100644
--- a/.github/workflows/backend-integration-test.yml
+++ b/.github/workflows/backend-integration-test.yml
@@ -90,13 +90,84 @@ jobs:
         run: |
           dev/ci/util_free_space.sh
 
-      - name: Backend Integration Test(JDK${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }})
+      - name: Backend Integration Test (JDK${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }})
         id: integrationTest
         run: >
           ./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }} -PskipWebITs -PskipDockerTests=false
           -x :web:test -x :clients:client-python:test -x :flink-connector:test -x :spark-connector:test -x :spark-connector:spark-common:test
           -x :spark-connector:spark-3.3:test -x :spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
           -x :spark-connector:spark-runtime-3.3:test -x :spark-connector:spark-runtime-3.4:test -x :spark-connector:spark-runtime-3.5:test
+          -x :authorizations:authorization-ranger:test
+
+      - name: Upload integration test reports
+        uses: actions/upload-artifact@v3
+        if: ${{ (failure() && steps.integrationTest.outcome == 'failure') || contains(github.event.pull_request.labels.*.name, 'upload log') }}
+        with:
+          name: integrate-test-reports-${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }}
+          path: |
+            build/reports
+            iceberg/iceberg-rest-server/build/*.log
+            integration-test/build/*.log
+            integration-test/build/*.tar
+            integration-test/build/trino-ci-container-log
+            distribution/package/logs/*.out
+            distribution/package/logs/*.log
+            catalogs/**/*.log
+            catalogs/**/*.tar
+            distribution/**/*.log
+
+  test-on-pr:
+    needs: changes
+    if: (github.event_name == 'pull_request' && needs.changes.outputs.source_changes == 'true')
+    runs-on: ubuntu-latest
+    timeout-minutes: 90
+    strategy:
+      matrix:
+        # Integration test for AMD64 architecture
+        architecture: [ linux/amd64 ]
+        java-version: [ 17 ]
+        test-mode: [ embedded, deploy ]
+        include:
+          - test-mode: 'embedded'
+            backend: 'h2'
+          - test-mode: 'deploy'
+            backend: 'mysql'
+
+    env:
+      PLATFORM: ${{ matrix.architecture }}
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: actions/setup-java@v4
+        with:
+          java-version: ${{ matrix.java-version }}
+          distribution: 'temurin'
+          cache: 'gradle'
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Check required command
+        run: |
+          dev/ci/check_commands.sh
+
+      - name: Package Gravitino
+        if: ${{ matrix.test-mode == 'deploy' }}
+        run: |
+          ./gradlew compileDistribution -x test -PjdkVersion=${{ matrix.java-version }}
+
+      - name: Free up disk space
+        run: |
+          dev/ci/util_free_space.sh
+
+      - name: Backend Integration Test (JDK${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }})
+        id: integrationTest
+        run: >
+          ./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }} -PskipWebITs -PskipDockerTests=false
+          -x :web:test -x :clients:client-python:test -x :flink-connector:test -x :spark-connector:test -x :spark-connector:spark-common:test
+          -x :spark-connector:spark-3.3:test -x :spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
+          -x :spark-connector:spark-runtime-3.3:test -x :spark-connector:spark-runtime-3.4:test -x :spark-connector:spark-runtime-3.5:test
+          -x :authorizations:authorization-ranger:test
 
       - name: Upload integrate tests reports
         uses: actions/upload-artifact@v3
diff --git a/LICENSE.bin b/LICENSE.bin
index b23d1bfefd4..ca9218f771a 100644
--- a/LICENSE.bin
+++ b/LICENSE.bin
@@ -310,6 +310,8 @@
     Apache Yetus - Audience Annotations
     Apache Kerby
     Apache Kyuubi
+    Apache Ranger
+    Apache Ranger intg
     Jackson JSON processor
     DataNucleus
     Modernizer Maven Plugin
@@ -364,7 +366,7 @@
   This product bundles various third-party components also under the
   BSD license
-
+
     JSR305
     LevelDB JNI
     RocksDB JNI
@@ -446,6 +448,7 @@
     Jakarta RESTful WS API
     Jakarta XML Binding API
     JavaServer Pages(TM) API
+    Javax WS RS API
     HK2 API Module
     HK2 Service Locator
     HK2 Utils
diff --git a/NOTICE.bin b/NOTICE.bin
index 5c63b8189ce..44cb17a5a4d 100644
--- a/NOTICE.bin
+++ b/NOTICE.bin
@@ -22,6 +22,9 @@ Copyright 2008-2023 The Apache Software Foundation
 
 Apache Zeppelin
 Copyright 2016-2023 The Apache Software Foundation
 
+Apache Ranger
+Copyright 2014-2024 The Apache Software Foundation
+
 Apache Hadoop
 Copyright 2006 and onwards The Apache Software Foundation.
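The test changes below repeatedly verify state directly against the Ranger admin server. As a minimal standalone sketch of that verification pattern, built from the same ranger-intg client calls that appear in RangerITEnv further down (the "admin" user and "hiveDev" repository name are placeholder assumptions, and construction of the RangerClient is omitted):

import org.apache.ranger.RangerClient;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerRole;

public class RangerRoleCheck {
  // Returns true when the role exists in the Ranger repository and at least one item of
  // the named policy grants to it -- the shape RangerITEnv.verifyRoleInRanger asserts.
  static boolean roleIsGranted(RangerClient rangerClient, String roleName, String policyName)
      throws RangerServiceException {
    RangerRole rangerRole = rangerClient.getRole(roleName, "admin", "hiveDev");
    RangerPolicy policy = rangerClient.getPolicy("hiveDev", policyName);
    return rangerRole != null
        && policy.getPolicyItems().stream()
            .anyMatch(policyItem -> policyItem.getRoles().contains(roleName));
  }
}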
diff --git a/authorizations/authorization-ranger/build.gradle.kts b/authorizations/authorization-ranger/build.gradle.kts index be7303517c1..9121a414ad0 100644 --- a/authorizations/authorization-ranger/build.gradle.kts +++ b/authorizations/authorization-ranger/build.gradle.kts @@ -51,11 +51,17 @@ dependencies { exclude("org.apache.ranger", "ranger-plugins-audit") exclude("org.apache.ranger", "ranger-plugins-cred") exclude("org.apache.ranger", "ranger-plugins-classloader") + exclude("javax.ws.rs") } + implementation(libs.javax.ws.rs.api) implementation(libs.javax.jaxb.api) { exclude("*") } + testImplementation(project(":common")) + testImplementation(project(":clients:client-java")) + testImplementation(project(":server")) + testImplementation(project(":catalogs:catalog-common")) testImplementation(project(":integration-test-common", "testArtifacts")) testImplementation(libs.junit.jupiter.api) testImplementation(libs.mockito.core) @@ -69,10 +75,12 @@ dependencies { exclude("org.elasticsearch") exclude("org.elasticsearch.client") exclude("org.elasticsearch.plugin") + exclude("javax.ws.rs") } testImplementation(libs.hive2.jdbc) { exclude("org.slf4j") } + testImplementation(libs.mysql.driver) } tasks { @@ -80,13 +88,20 @@ tasks { from(configurations.runtimeClasspath) into("build/libs") } -} -tasks.build { - dependsOn("runtimeJars", "javadoc") + val copyAuthorizationLibs by registering(Copy::class) { + dependsOn("jar", "runtimeJars") + from("build/libs") + into("$rootDir/distribution/package/authorizations/ranger/libs") + } + + register("copyLibAndConfig", Copy::class) { + dependsOn(copyAuthorizationLibs) + } } tasks.test { + dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars") val skipUTs = project.hasProperty("skipTests") if (skipUTs) { // Only run integration tests @@ -101,7 +116,3 @@ tasks.test { dependsOn(tasks.jar) } } - -tasks.getByName("generateMetadataFileForMavenJavaPublication") { - dependsOn("runtimeJars") -} diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java new file mode 100644 index 00000000000..89ecbc849ad --- /dev/null +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java @@ -0,0 +1,242 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.ranger.integration.test; + +import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_ADMIN_URL; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME; +import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import org.apache.gravitino.Catalog; +import org.apache.gravitino.Configs; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.NameIdentifier; +import org.apache.gravitino.Schema; +import org.apache.gravitino.auth.AuthConstants; +import org.apache.gravitino.authorization.Privileges; +import org.apache.gravitino.authorization.Role; +import org.apache.gravitino.authorization.SecurableObject; +import org.apache.gravitino.authorization.SecurableObjects; +import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin; +import org.apache.gravitino.catalog.hive.HiveConstants; +import org.apache.gravitino.client.GravitinoMetalake; +import org.apache.gravitino.connector.AuthorizationPropertiesMeta; +import org.apache.gravitino.integration.test.container.HiveContainer; +import org.apache.gravitino.integration.test.container.RangerContainer; +import org.apache.gravitino.integration.test.util.AbstractIT; +import org.apache.gravitino.integration.test.util.GravitinoITUtils; +import org.apache.gravitino.rel.Column; +import org.apache.gravitino.rel.Table; +import org.apache.gravitino.rel.expressions.NamedReference; +import org.apache.gravitino.rel.expressions.distributions.Distribution; +import org.apache.gravitino.rel.expressions.distributions.Distributions; +import org.apache.gravitino.rel.expressions.distributions.Strategy; +import org.apache.gravitino.rel.expressions.sorts.NullOrdering; +import org.apache.gravitino.rel.expressions.sorts.SortDirection; +import org.apache.gravitino.rel.expressions.sorts.SortOrder; +import org.apache.gravitino.rel.expressions.sorts.SortOrders; +import org.apache.gravitino.rel.expressions.transforms.Transforms; +import org.apache.gravitino.rel.types.Types; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Tag("gravitino-docker-test") +public class RangerHiveE2EIT extends AbstractIT { + private static final Logger LOG = LoggerFactory.getLogger(RangerHiveE2EIT.class); + + private static RangerAuthorizationPlugin rangerAuthPlugin; + public static final String metalakeName = + GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase(); + public static final String catalogName = + GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase(); + public static final String schemaName = + GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase(); + public static final String tableName = + 
GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase();
+
+  public static final String HIVE_COL_NAME1 = "hive_col_name1";
+  public static final String HIVE_COL_NAME2 = "hive_col_name2";
+  public static final String HIVE_COL_NAME3 = "hive_col_name3";
+
+  private static GravitinoMetalake metalake;
+  private static Catalog catalog;
+  private static final String provider = "hive";
+  private static String HIVE_METASTORE_URIS;
+
+  @BeforeAll
+  public static void startIntegrationTest() throws Exception {
+    Map<String, String> configs = Maps.newHashMap();
+    configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
+    configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+    registerCustomConfigs(configs);
+    AbstractIT.startIntegrationTest();
+
+    RangerITEnv.setup();
+    containerSuite.startHiveContainer();
+    HIVE_METASTORE_URIS =
+        String.format(
+            "thrift://%s:%d",
+            containerSuite.getHiveContainer().getContainerIpAddress(),
+            HiveContainer.HIVE_METASTORE_PORT);
+
+    createMetalake();
+    createCatalogAndRangerAuthPlugin();
+    createSchema();
+    createHiveTable();
+  }
+
+  @AfterAll
+  public static void stop() throws IOException {
+    AbstractIT.client = null;
+  }
+
+  @Test
+  void testCreateRole() {
+    String roleName = RangerITEnv.currentFunName();
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("k1", "v1");
+
+    SecurableObject table1 =
+        SecurableObjects.parse(
+            String.format("%s.%s.%s", catalogName, schemaName, tableName),
+            MetadataObject.Type.TABLE,
+            Lists.newArrayList(Privileges.SelectTable.allow()));
+    Role role = metalake.createRole(roleName, properties, Lists.newArrayList(table1));
+    RangerITEnv.verifyRoleInRanger(rangerAuthPlugin, role);
+  }
+
+  private static void createMetalake() {
+    GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
+    Assertions.assertEquals(0, gravitinoMetalakes.length);
+
+    GravitinoMetalake createdMetalake =
+        client.createMetalake(metalakeName, "comment", Collections.emptyMap());
+    GravitinoMetalake loadMetalake = client.loadMetalake(metalakeName);
+    Assertions.assertEquals(createdMetalake, loadMetalake);
+
+    metalake = loadMetalake;
+  }
+
+  private static void createCatalogAndRangerAuthPlugin() {
+    rangerAuthPlugin =
+        new RangerAuthorizationPlugin(
+            "hive",
+            ImmutableMap.of(
+                AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+                String.format(
+                    "http://%s:%d",
+                    containerSuite.getRangerContainer().getContainerIpAddress(),
+                    RangerContainer.RANGER_SERVER_PORT),
+                AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+                RangerContainer.authType,
+                AuthorizationPropertiesMeta.RANGER_USERNAME,
+                RangerContainer.rangerUserName,
+                AuthorizationPropertiesMeta.RANGER_PASSWORD,
+                RangerContainer.rangerPassword,
+                AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+                RangerITEnv.RANGER_HIVE_REPO_NAME));
+
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS);
+    properties.put(AUTHORIZATION_PROVIDER, "ranger");
+    properties.put(RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME);
+    properties.put(
+        RANGER_ADMIN_URL,
+        String.format(
+            "http://localhost:%s",
+            containerSuite.getRangerContainer().getMappedPort(RANGER_SERVER_PORT)));
+    properties.put(RANGER_AUTH_TYPE, RangerContainer.authType);
+    properties.put(RANGER_USERNAME, RangerContainer.rangerUserName);
+    properties.put(RANGER_PASSWORD, RangerContainer.rangerPassword);
+
+    metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
+    catalog = metalake.loadCatalog(catalogName);
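+    // Ranger is wired twice on purpose: the RangerAuthorizationPlugin built above lets this
+    // test query Ranger directly to verify policies, while the catalog properties make the
+    // Gravitino server create its own plugin instance through the isolated class loader.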
LOG.info("Catalog created: {}", catalog); + } + + private static void createSchema() { + Map properties = Maps.newHashMap(); + properties.put("key1", "val1"); + properties.put("key2", "val2"); + properties.put( + "location", + String.format( + "hdfs://%s:%d/user/hive/warehouse/%s.db", + containerSuite.getHiveContainer().getContainerIpAddress(), + HiveContainer.HDFS_DEFAULTFS_PORT, + schemaName.toLowerCase())); + String comment = "comment"; + + catalog.asSchemas().createSchema(schemaName, comment, properties); + Schema loadSchema = catalog.asSchemas().loadSchema(schemaName); + Assertions.assertEquals(schemaName.toLowerCase(), loadSchema.name()); + } + + public static void createHiveTable() { + // Create table from Gravitino API + Column[] columns = createColumns(); + NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName); + + Distribution distribution = + Distributions.of(Strategy.EVEN, 10, NamedReference.field(HIVE_COL_NAME1)); + + final SortOrder[] sortOrders = + new SortOrder[] { + SortOrders.of( + NamedReference.field(HIVE_COL_NAME2), + SortDirection.DESCENDING, + NullOrdering.NULLS_FIRST) + }; + + Map properties = ImmutableMap.of("key1", "val1", "key2", "val2"); + Table createdTable = + catalog + .asTableCatalog() + .createTable( + nameIdentifier, + columns, + "table_comment", + properties, + Transforms.EMPTY_TRANSFORM, + distribution, + sortOrders); + LOG.info("Table created: {}", createdTable); + } + + private static Column[] createColumns() { + Column col1 = Column.of(HIVE_COL_NAME1, Types.ByteType.get(), "col_1_comment"); + Column col2 = Column.of(HIVE_COL_NAME2, Types.DateType.get(), "col_2_comment"); + Column col3 = Column.of(HIVE_COL_NAME3, Types.StringType.get(), "col_3_comment"); + return new Column[] {col1, col2, col3}; + } +} diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java index c21a281b4ed..79f66ef287a 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java @@ -19,6 +19,8 @@ package org.apache.gravitino.authorization.ranger.integration.test; import static org.apache.gravitino.authorization.SecurableObjects.DOT_SPLITTER; +import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName; +import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.verifyRoleInRanger; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -34,7 +36,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import org.apache.gravitino.MetadataObject; import org.apache.gravitino.MetadataObjects; import org.apache.gravitino.authorization.Owner; @@ -57,7 +58,6 @@ import org.apache.gravitino.meta.UserEntity; import org.apache.ranger.RangerServiceException; import org.apache.ranger.plugin.model.RangerPolicy; -import org.apache.ranger.plugin.model.RangerRole; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Tag; @@ -66,7 +66,7 @@ import org.slf4j.LoggerFactory; @Tag("gravitino-docker-test") -public class RangerHiveIT extends RangerITEnv { +public class 
RangerHiveIT { private static final Logger LOG = LoggerFactory.getLogger(RangerHiveIT.class); private static final ContainerSuite containerSuite = ContainerSuite.getInstance(); @@ -119,13 +119,6 @@ public static void setup() { RangerITEnv.RANGER_HIVE_REPO_NAME)); rangerPolicyHelper = new RangerHelper(rangerAuthPlugin, "hive"); - createRangerHdfsRepository( - containerSuite.getHiveRangerContainer().getContainerIpAddress(), true); - createRangerHiveRepository( - containerSuite.getHiveRangerContainer().getContainerIpAddress(), true); - allowAnyoneAccessHDFS(); - allowAnyoneAccessInformationSchema(); - // Create hive connection String url = String.format( @@ -182,7 +175,7 @@ public RoleEntity mock3TableRole(String roleName) { public void testOnRoleCreated() { RoleEntity role = mock3TableRole(currentFunName()); Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role)); - verifyRoleInRanger(role); + verifyRoleInRanger(rangerAuthPlugin, role); } @Test @@ -297,9 +290,9 @@ static void createHivePolicy(List metaObjects, String roleName) { policyItem.setAccesses( Arrays.asList( new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT))); - updateOrCreateRangerPolicy( + RangerITEnv.updateOrCreateRangerPolicy( RangerDefines.SERVICE_TYPE_HIVE, - RANGER_HIVE_REPO_NAME, + RangerITEnv.RANGER_HIVE_REPO_NAME, roleName, policyResourceMap, Collections.singletonList(policyItem)); @@ -328,7 +321,7 @@ public void testRoleChangeAddSecurableObject() { .withAuditInfo(auditInfo) .withSecurableObjects(Lists.newArrayList(securableObject1)) .build(); - verifyRoleInRanger(verifyRole1); + verifyRoleInRanger(rangerAuthPlugin, verifyRole1); // 2. Multi-call Add a same entity and privilege to the role, because support idempotent // operation, so return true @@ -336,7 +329,7 @@ public void testRoleChangeAddSecurableObject() { rangerAuthPlugin.onRoleUpdated( mockCatalogRole, RoleChange.addSecurableObject(mockCatalogRole.name(), securableObject1))); - verifyRoleInRanger(verifyRole1); + verifyRoleInRanger(rangerAuthPlugin, verifyRole1); // 3. 
Add the same metadata but have different privileges to the role SecurableObject securableObject3 = @@ -376,7 +369,7 @@ public void testRoleChangeRemoveSecurableObject() { .withAuditInfo(auditInfo) .withSecurableObjects(securableObjects) .build(); - verifyRoleInRanger(verifyRole); + verifyRoleInRanger(rangerAuthPlugin, verifyRole); } } } @@ -418,7 +411,7 @@ public void testRoleChangeUpdateSecurableObject() { .withAuditInfo(auditInfo) .withSecurableObjects(Lists.newArrayList(newSecurableObject)) .build(); - verifyRoleInRanger(verifyRole); + verifyRoleInRanger(rangerAuthPlugin, verifyRole); } @Test @@ -465,7 +458,7 @@ public void testRoleChangeCombinedOperation() { .withAuditInfo(auditInfo) .withSecurableObjects(Lists.newArrayList(newSecurableObject)) .build(); - verifyRoleInRanger(verifyRole); + verifyRoleInRanger(rangerAuthPlugin, verifyRole); verifyOwnerInRanger(oldMetadataObject, Lists.newArrayList(userName)); // Delete the role @@ -496,12 +489,12 @@ public void testOnGrantedRolesToUser() { .build(); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role), userEntity1)); - verifyRoleInRanger(role, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1)); // multi-call to granted role to the user1 Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role), userEntity1)); - verifyRoleInRanger(role, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1)); // granted a role to the user2 String userName2 = "user2"; @@ -517,7 +510,7 @@ public void testOnGrantedRolesToUser() { rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role), userEntity2)); // Same to verify user1 and user2 - verifyRoleInRanger(role, Lists.newArrayList(userName1, userName2)); + verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1, userName2)); } @Test @@ -538,16 +531,16 @@ public void testOnRevokedRolesFromUser() { .build(); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role), userEntity1)); - verifyRoleInRanger(role, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1)); Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role), userEntity1)); - verifyRoleInRanger(role, null, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, Lists.newArrayList(userName1)); // multi-call to revoked role from user1 Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role), userEntity1)); - verifyRoleInRanger(role, null, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, Lists.newArrayList(userName1)); } @Test @@ -568,12 +561,12 @@ public void testOnGrantedRolesToGroup() { .build(); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role), groupEntity1)); - verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, null, Lists.newArrayList(groupName1)); // multi-call to grant a role to the group1 test idempotent operation Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role), groupEntity1)); - verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, null, Lists.newArrayList(groupName1)); // granted a role to the group2 String groupName2 = "group2"; @@ -589,7 
+582,8 @@ public void testOnGrantedRolesToGroup() { rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role), groupEntity2)); // Same to verify group1 and group2 - verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1, groupName2)); + verifyRoleInRanger( + rangerAuthPlugin, role, null, null, Lists.newArrayList(groupName1, groupName2)); } @Test @@ -610,16 +604,16 @@ public void testOnRevokedRolesFromGroup() { .build(); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role), groupEntity1)); - verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, null, Lists.newArrayList(groupName1)); Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role), groupEntity1)); - verifyRoleInRanger(role, null, null, null, Lists.newArrayList(groupName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, null, null, Lists.newArrayList(groupName1)); // multi-call to revoke from the group1 Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role), groupEntity1)); - verifyRoleInRanger(role, null, null, null, Lists.newArrayList(groupName1)); + verifyRoleInRanger(rangerAuthPlugin, role, null, null, null, Lists.newArrayList(groupName1)); } private static class MockOwner implements Owner { @@ -734,7 +728,7 @@ public void testCombinationOperation() { .withSecurableObjects(Lists.newArrayList(securableObject1)) .build(); Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role1)); - verifyRoleInRanger(role1); + verifyRoleInRanger(rangerAuthPlugin, role1); // Create a `SelectTable` privilege role SecurableObject securableObject2 = @@ -750,7 +744,7 @@ public void testCombinationOperation() { .withSecurableObjects(Lists.newArrayList(securableObject2)) .build(); Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role2)); - verifyRoleInRanger(role2); + verifyRoleInRanger(rangerAuthPlugin, role2); // Create a `ModifyTable` privilege role SecurableObject securableObject3 = @@ -766,7 +760,7 @@ public void testCombinationOperation() { .withSecurableObjects(Lists.newArrayList(securableObject3)) .build(); Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role3)); - verifyRoleInRanger(role3); + verifyRoleInRanger(rangerAuthPlugin, role3); /** Test grant to user */ // granted role1 to the user1 @@ -784,7 +778,7 @@ public void testCombinationOperation() { // multiple call to grant role1 to the user1 to test idempotent operation Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role1), userEntity1)); - verifyRoleInRanger(role1, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role1, Lists.newArrayList(userName1)); // granted role1 to the user2 String userName2 = "user2"; @@ -798,7 +792,7 @@ public void testCombinationOperation() { .build(); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role1), userEntity2)); - verifyRoleInRanger(role1, Lists.newArrayList(userName1, userName2)); + verifyRoleInRanger(rangerAuthPlugin, role1, Lists.newArrayList(userName1, userName2)); // granted role1 to the user3 String userName3 = "user3"; @@ -812,22 +806,25 @@ public void testCombinationOperation() { .build(); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role1), userEntity3)); - verifyRoleInRanger(role1, Lists.newArrayList(userName1, userName2, userName3)); + verifyRoleInRanger( + rangerAuthPlugin, role1, Lists.newArrayList(userName1, userName2, 
userName3)); // Same granted role2 and role3 to the user1 and user2 and user3 Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role2, role3), userEntity1)); - verifyRoleInRanger(role2, Lists.newArrayList(userName1)); - verifyRoleInRanger(role3, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role2, Lists.newArrayList(userName1)); + verifyRoleInRanger(rangerAuthPlugin, role3, Lists.newArrayList(userName1)); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role2, role3), userEntity2)); - verifyRoleInRanger(role2, Lists.newArrayList(userName1, userName2)); - verifyRoleInRanger(role3, Lists.newArrayList(userName1, userName2)); + verifyRoleInRanger(rangerAuthPlugin, role2, Lists.newArrayList(userName1, userName2)); + verifyRoleInRanger(rangerAuthPlugin, role3, Lists.newArrayList(userName1, userName2)); Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role2, role3), userEntity3)); - verifyRoleInRanger(role2, Lists.newArrayList(userName1, userName2, userName3)); - verifyRoleInRanger(role3, Lists.newArrayList(userName1, userName2, userName3)); + verifyRoleInRanger( + rangerAuthPlugin, role2, Lists.newArrayList(userName1, userName2, userName3)); + verifyRoleInRanger( + rangerAuthPlugin, role3, Lists.newArrayList(userName1, userName2, userName3)); /** Test grant to group */ // granted role1 to the group1 @@ -843,6 +840,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role1), groupEntity1)); verifyRoleInRanger( + rangerAuthPlugin, role1, Lists.newArrayList(userName1, userName2, userName3), null, @@ -861,6 +859,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role1), groupEntity2)); verifyRoleInRanger( + rangerAuthPlugin, role1, Lists.newArrayList(userName1, userName2, userName3), null, @@ -879,6 +878,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role1), groupEntity3)); verifyRoleInRanger( + rangerAuthPlugin, role1, Lists.newArrayList(userName1, userName2, userName3), null, @@ -888,11 +888,13 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role2, role3), groupEntity1)); verifyRoleInRanger( + rangerAuthPlugin, role2, Lists.newArrayList(userName1, userName2, userName3), null, Lists.newArrayList(groupName1)); verifyRoleInRanger( + rangerAuthPlugin, role3, Lists.newArrayList(userName1, userName2, userName3), null, @@ -901,11 +903,13 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role2, role3), groupEntity2)); verifyRoleInRanger( + rangerAuthPlugin, role2, Lists.newArrayList(userName1, userName2, userName3), null, Lists.newArrayList(groupName1, groupName2)); verifyRoleInRanger( + rangerAuthPlugin, role3, Lists.newArrayList(userName1, userName2, userName3), null, @@ -913,11 +917,13 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role2, role3), groupEntity3)); verifyRoleInRanger( + rangerAuthPlugin, role2, Lists.newArrayList(userName1, userName2, userName3), null, Lists.newArrayList(groupName1, groupName2, groupName3)); verifyRoleInRanger( + rangerAuthPlugin, role3, Lists.newArrayList(userName1, userName2, userName3), null, @@ -928,6 
+934,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role1), userEntity1)); verifyRoleInRanger( + rangerAuthPlugin, role1, Lists.newArrayList(userName2, userName3), Lists.newArrayList(userName1), @@ -937,6 +944,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role1), userEntity2)); verifyRoleInRanger( + rangerAuthPlugin, role1, Lists.newArrayList(userName3), Lists.newArrayList(userName1, userName2), @@ -946,6 +954,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role1), userEntity3)); verifyRoleInRanger( + rangerAuthPlugin, role1, null, Lists.newArrayList(userName1, userName2, userName3), @@ -955,29 +964,44 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2, role3), userEntity1)); verifyRoleInRanger( - role2, Lists.newArrayList(userName2, userName3), Lists.newArrayList(userName1)); + rangerAuthPlugin, + role2, + Lists.newArrayList(userName2, userName3), + Lists.newArrayList(userName1)); verifyRoleInRanger( - role3, Lists.newArrayList(userName2, userName3), Lists.newArrayList(userName1)); + rangerAuthPlugin, + role3, + Lists.newArrayList(userName2, userName3), + Lists.newArrayList(userName1)); Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2, role3), userEntity2)); verifyRoleInRanger( - role2, Lists.newArrayList(userName3), Lists.newArrayList(userName1, userName2)); + rangerAuthPlugin, + role2, + Lists.newArrayList(userName3), + Lists.newArrayList(userName1, userName2)); verifyRoleInRanger( - role3, Lists.newArrayList(userName3), Lists.newArrayList(userName1, userName2)); + rangerAuthPlugin, + role3, + Lists.newArrayList(userName3), + Lists.newArrayList(userName1, userName2)); Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2, role3), userEntity3)); Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2, role3), userEntity3)); - verifyRoleInRanger(role2, null, Lists.newArrayList(userName1, userName2, userName3)); - verifyRoleInRanger(role3, null, Lists.newArrayList(userName1, userName2, userName3)); + verifyRoleInRanger( + rangerAuthPlugin, role2, null, Lists.newArrayList(userName1, userName2, userName3)); + verifyRoleInRanger( + rangerAuthPlugin, role3, null, Lists.newArrayList(userName1, userName2, userName3)); /** Test revoke from group */ // revoke role1 from the group1 Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role1), groupEntity1)); verifyRoleInRanger( + rangerAuthPlugin, role1, null, Lists.newArrayList(userName1, userName2, userName3), @@ -988,6 +1012,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role1), groupEntity2)); verifyRoleInRanger( + rangerAuthPlugin, role1, null, Lists.newArrayList(userName1, userName2, userName3), @@ -998,6 +1023,7 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role1), groupEntity3)); verifyRoleInRanger( + rangerAuthPlugin, role1, null, Lists.newArrayList(userName1, userName2, userName3), @@ -1008,12 +1034,14 @@ public void testCombinationOperation() { Assertions.assertTrue( 
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role2, role3), groupEntity1)); verifyRoleInRanger( + rangerAuthPlugin, role2, null, Lists.newArrayList(userName1, userName2, userName3), Lists.newArrayList(groupName2, groupName3), Lists.newArrayList(groupName1)); verifyRoleInRanger( + rangerAuthPlugin, role3, null, Lists.newArrayList(userName1, userName2, userName3), @@ -1023,12 +1051,14 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role2, role3), groupEntity2)); verifyRoleInRanger( + rangerAuthPlugin, role2, null, Lists.newArrayList(userName1, userName2, userName3), Lists.newArrayList(groupName3), Lists.newArrayList(groupName1, groupName2)); verifyRoleInRanger( + rangerAuthPlugin, role3, null, Lists.newArrayList(userName1, userName2, userName3), @@ -1038,12 +1068,14 @@ public void testCombinationOperation() { Assertions.assertTrue( rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role2, role3), groupEntity3)); verifyRoleInRanger( + rangerAuthPlugin, role2, null, Lists.newArrayList(userName1, userName2, userName3), null, Lists.newArrayList(groupName1, groupName2, groupName3)); verifyRoleInRanger( + rangerAuthPlugin, role3, null, Lists.newArrayList(userName1, userName2, userName3), @@ -1068,102 +1100,6 @@ public void testCombinationOperation() { Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject))); } - private void verifyRoleInRanger( - Role role, - List includeUsers, - List excludeUsers, - List includeGroups, - List excludeGroups) { - // Verify role in RangerRole - RangerRole rangerRole = null; - try { - rangerRole = - rangerClient.getRole( - role.name(), rangerAuthPlugin.rangerAdminName, RANGER_HIVE_REPO_NAME); - LOG.info("rangerRole: " + rangerRole.toString()); - } catch (RangerServiceException e) { - throw new RuntimeException(e); - } - rangerRole - .getUsers() - .forEach( - user -> { - if (includeUsers != null && !includeUsers.isEmpty()) { - Assertions.assertTrue( - includeUsers.contains(user.getName()), - "includeUsersInRole: " + includeUsers + ", user: " + user.getName()); - } - if (excludeUsers != null && !excludeUsers.isEmpty()) { - Assertions.assertFalse( - excludeUsers.contains(user.getName()), - "excludeUsersInRole: " + excludeUsers.toString() + ", user: " + user.getName()); - } - }); - rangerRole - .getGroups() - .forEach( - group -> { - if (includeGroups != null && !includeGroups.isEmpty()) { - Assertions.assertTrue( - includeGroups.contains(group.getName()), - "includeGroupsInRole: " - + includeGroups.toString() - + ", group: " - + group.getName()); - } - if (excludeGroups != null && !excludeGroups.isEmpty()) { - Assertions.assertFalse( - excludeGroups.contains(group.getName()), - "excludeGroupsInRole: " - + excludeGroups.toString() - + ", group: " - + group.getName()); - } - }); - - // Verify role in RangerPolicy - role.securableObjects() - .forEach( - securableObject -> { - RangerPolicy policy; - try { - policy = - rangerClient.getPolicy( - RangerITEnv.RANGER_HIVE_REPO_NAME, securableObject.fullName()); - LOG.info("policy: " + policy.toString()); - } catch (RangerServiceException e) { - LOG.error("Failed to get policy: " + securableObject.fullName()); - throw new RuntimeException(e); - } - - securableObject - .privileges() - .forEach( - gravitinoPrivilege -> { - Set mappedPrivileges = - rangerAuthPlugin.translatePrivilege(gravitinoPrivilege.name()); - - boolean match = - policy.getPolicyItems().stream() - .filter( - policyItem -> { - // Filter 
Ranger policy item by Gravitino privilege - return policyItem.getAccesses().stream() - .anyMatch( - access -> { - return mappedPrivileges.contains(access.getType()); - }); - }) - .allMatch( - policyItem -> { - // Verify role name in Ranger policy item - return policyItem.getRoles().contains(role.name()); - }); - Assertions.assertTrue(match); - }); - }); - } - /** Verify the Gravitino role in Ranger service */ private void verifyOwnerInRanger( MetadataObject metadataObject, @@ -1175,7 +1111,7 @@ private void verifyOwnerInRanger( String policyName = metadataObject.fullName(); RangerPolicy policy; try { - policy = rangerClient.getPolicy(RangerITEnv.RANGER_HIVE_REPO_NAME, policyName); + policy = RangerITEnv.rangerClient.getPolicy(RangerITEnv.RANGER_HIVE_REPO_NAME, policyName); LOG.info("policy: " + policy.toString()); } catch (RangerServiceException e) { LOG.error("Failed to get policy: " + policyName); @@ -1259,29 +1195,11 @@ private void verifyOwnerInRanger(MetadataObject metadataObject, List inc verifyOwnerInRanger(metadataObject, includeUsers, null, null, null); } - private void verifyRoleInRanger(Role role) { - verifyRoleInRanger(role, null, null, null, null); - } - - private void verifyRoleInRanger(Role role, List includeRolesInPolicyItem) { - verifyRoleInRanger(role, includeRolesInPolicyItem, null, null, null); - } - - private void verifyRoleInRanger( - Role role, List includeRolesInPolicyItem, List excludeRolesInPolicyItem) { - verifyRoleInRanger(role, includeRolesInPolicyItem, excludeRolesInPolicyItem, null, null); - } - - private void verifyRoleInRanger( - Role role, List includeUsers, List excludeUsers, List includeGroups) { - verifyRoleInRanger(role, includeUsers, excludeUsers, includeGroups, null); - } - /** Currently we only test Ranger Hive, So wo Allow anyone to visit HDFS */ static void allowAnyoneAccessHDFS() { String policyName = currentFunName(); try { - if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS, policyName)) { + if (null != RangerITEnv.rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS, policyName)) { return; } } catch (RangerServiceException e) { @@ -1297,9 +1215,9 @@ static void allowAnyoneAccessHDFS() { new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_READ), new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_WRITE), new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_EXECUTE))); - updateOrCreateRangerPolicy( + RangerITEnv.updateOrCreateRangerPolicy( RangerDefines.SERVICE_TYPE_HDFS, - RANGER_HDFS_REPO_NAME, + RangerITEnv.RANGER_HDFS_REPO_NAME, policyName, policyResourceMap, Collections.singletonList(policyItem)); @@ -1312,7 +1230,7 @@ static void allowAnyoneAccessHDFS() { static void allowAnyoneAccessInformationSchema() { String policyName = currentFunName(); try { - if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE, policyName)) { + if (null != RangerITEnv.rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE, policyName)) { return; } } catch (RangerServiceException e) { @@ -1332,9 +1250,9 @@ static void allowAnyoneAccessInformationSchema() { policyItem.setAccesses( Arrays.asList( new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT))); - updateOrCreateRangerPolicy( + RangerITEnv.updateOrCreateRangerPolicy( RangerDefines.SERVICE_TYPE_HIVE, - RANGER_HIVE_REPO_NAME, + RangerITEnv.RANGER_HIVE_REPO_NAME, policyName, policyResourceMap, Collections.singletonList(policyItem)); @@ -1353,9 +1271,9 @@ public void testCreateDatabase() throws 
Exception {
     policyItem.setUsers(Arrays.asList(adminUser));
     policyItem.setAccesses(
         Arrays.asList(new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_ALL)));
-    updateOrCreateRangerPolicy(
+    RangerITEnv.updateOrCreateRangerPolicy(
         RangerDefines.SERVICE_TYPE_HIVE,
-        RANGER_HIVE_REPO_NAME,
+        RangerITEnv.RANGER_HIVE_REPO_NAME,
         "testAllowShowDatabase",
         policyResourceMap,
         Collections.singletonList(policyItem));
@@ -1383,9 +1301,9 @@ public void testCreateDatabase() throws Exception {
     policyItem.setUsers(Arrays.asList(adminUser, anonymousUser));
     policyItem.setAccesses(
         Arrays.asList(new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_ALL)));
-    updateOrCreateRangerPolicy(
+    RangerITEnv.updateOrCreateRangerPolicy(
         RangerDefines.SERVICE_TYPE_HIVE,
-        RANGER_HIVE_REPO_NAME,
+        RangerITEnv.RANGER_HIVE_REPO_NAME,
         "testAllowShowDatabase",
         policyResourceMap,
         Collections.singletonList(policyItem));
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
index 784f91b2bb4..2808a2b796d 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
@@ -20,10 +20,15 @@
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.stream.Collectors;
+import org.apache.gravitino.authorization.Role;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin;
 import org.apache.gravitino.authorization.ranger.RangerHelper;
 import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
 import org.apache.gravitino.integration.test.container.ContainerSuite;
@@ -32,11 +37,10 @@
 import org.apache.ranger.RangerClient;
 import org.apache.ranger.RangerServiceException;
 import org.apache.ranger.plugin.model.RangerPolicy;
+import org.apache.ranger.plugin.model.RangerRole;
 import org.apache.ranger.plugin.model.RangerService;
 import org.apache.ranger.plugin.util.SearchFilter;
-import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,16 +54,25 @@ public class RangerITEnv {
   protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev";
   private static final String RANGER_HDFS_TYPE = "hdfs";
   protected static RangerClient rangerClient;
-
+  private static volatile boolean initRangerService = false;
   private static final ContainerSuite containerSuite = ContainerSuite.getInstance();
 
-  @BeforeAll
   public static void setup() {
     containerSuite.startRangerContainer();
     rangerClient = containerSuite.getRangerContainer().rangerClient;
+
+    if (!initRangerService) {
+      synchronized (RangerITEnv.class) {
+        // Re-check under the lock so racing callers initialize the repositories only once
+        if (!initRangerService) {
+          // No IP address set, no impact on testing
+          createRangerHdfsRepository("", true);
+          createRangerHiveRepository("", true);
+          allowAnyoneAccessHDFS();
+          allowAnyoneAccessInformationSchema();
+          initRangerService = true;
+        }
+      }
+    }
   }
 
-  @AfterAll
   public static void cleanup() {
     try {
       if (rangerClient != null) {
@@ -75,6 +88,71 @@
     }
   }
 
+  /** Currently we only test Ranger Hive, so we allow anyone to access HDFS. */
+  static void allowAnyoneAccessHDFS() {
+    String policyName = currentFunName();
+    try {
+      if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS, policyName)) {
+        return;
+      }
+    } catch (RangerServiceException e) {
+      // If the policy doesn't exist, we will create it
+      LOG.warn("Error while fetching policy: {}", e.getMessage());
+    }
+
+    Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
+        ImmutableMap.of(RangerDefines.RESOURCE_PATH, new RangerPolicy.RangerPolicyResource("/*"));
+    RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem();
+    policyItem.setUsers(Arrays.asList(RangerDefines.CURRENT_USER));
+    policyItem.setAccesses(
+        Arrays.asList(
+            new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_READ),
+            new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_WRITE),
+            new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_EXECUTE)));
+    updateOrCreateRangerPolicy(
+        RangerDefines.SERVICE_TYPE_HDFS,
+        RANGER_HDFS_REPO_NAME,
+        policyName,
+        policyResourceMap,
+        Collections.singletonList(policyItem));
+  }
+
+  /**
+   * Hive must have this policy: allow anyone to access the information schema so that
+   * `database`, `tables` and `columns` can be shown.
+   */
+  static void allowAnyoneAccessInformationSchema() {
+    String policyName = currentFunName();
+    try {
+      if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE, policyName)) {
+        return;
+      }
+    } catch (RangerServiceException e) {
+      // If the policy doesn't exist, we will create it
+      LOG.warn("Error while fetching policy: {}", e.getMessage());
+    }
+
+    Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
+        ImmutableMap.of(
+            RangerDefines.RESOURCE_DATABASE,
+            new RangerPolicy.RangerPolicyResource("information_schema"),
+            RangerDefines.RESOURCE_TABLE,
+            new RangerPolicy.RangerPolicyResource("*"),
+            RangerDefines.RESOURCE_COLUMN,
+            new RangerPolicy.RangerPolicyResource("*"));
+    RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem();
+    policyItem.setGroups(Arrays.asList(RangerDefines.PUBLIC_GROUP));
+    policyItem.setAccesses(
+        Arrays.asList(
+            new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT)));
+    updateOrCreateRangerPolicy(
+        RangerDefines.SERVICE_TYPE_HIVE,
+        RANGER_HIVE_REPO_NAME,
+        policyName,
+        policyResourceMap,
+        Collections.singletonList(policyItem));
+  }
+
   public void createRangerTrinoRepository(String trinoIp) {
     String usernameKey = "username";
     String usernameVal = "admin";
@@ -221,6 +299,134 @@ public static void createRangerHdfsRepository(String hdfsIp, boolean cleanAllPol
     }
   }
 
+  protected static void verifyRoleInRanger(
+      RangerAuthorizationPlugin rangerAuthPlugin,
+      Role role,
+      List<String> includeUsers,
+      List<String> excludeUsers,
+      List<String> includeGroups,
+      List<String> excludeGroups) {
+    // Verify role in RangerRole
+    RangerRole rangerRole = null;
+    try {
+      rangerRole =
+          RangerITEnv.rangerClient.getRole(
+              role.name(), rangerAuthPlugin.rangerAdminName, RangerITEnv.RANGER_HIVE_REPO_NAME);
+      LOG.info("rangerRole: " + rangerRole.toString());
+    } catch (RangerServiceException e) {
+      throw new RuntimeException(e);
+    }
+    rangerRole
+        .getUsers()
+        .forEach(
+            user -> {
+              if (includeUsers != null && !includeUsers.isEmpty()) {
+                Assertions.assertTrue(
+                    includeUsers.contains(user.getName()),
+                    "includeUsersInRole: " + includeUsers + ", user: " + user.getName());
+              }
+              if (excludeUsers != null && !excludeUsers.isEmpty()) {
+                Assertions.assertFalse(
+                    excludeUsers.contains(user.getName()),
+                    "excludeUsersInRole: " + excludeUsers.toString() + ", user: " + user.getName());
+              }
+            });
+    rangerRole
+        .getGroups()
+        .forEach(
+            group -> {
+              if (includeGroups != null && !includeGroups.isEmpty()) {
+                Assertions.assertTrue(
+                    includeGroups.contains(group.getName()),
+                    "includeGroupsInRole: "
+                        + includeGroups.toString()
+                        + ", group: "
+                        + group.getName());
+              }
+              if (excludeGroups != null && !excludeGroups.isEmpty()) {
+                Assertions.assertFalse(
+                    excludeGroups.contains(group.getName()),
+                    "excludeGroupsInRole: "
+                        + excludeGroups.toString()
+                        + ", group: "
+                        + group.getName());
+              }
+            });
+
+    // Verify role in RangerPolicy
+    role.securableObjects()
+        .forEach(
+            securableObject -> {
+              RangerPolicy policy;
+              try {
+                policy =
+                    RangerITEnv.rangerClient.getPolicy(
+                        RangerITEnv.RANGER_HIVE_REPO_NAME, securableObject.fullName());
+                LOG.info("policy: " + policy.toString());
+              } catch (RangerServiceException e) {
+                LOG.error("Failed to get policy: " + securableObject.fullName());
+                throw new RuntimeException(e);
+              }
+
+              securableObject
+                  .privileges()
+                  .forEach(
+                      gravitinoPrivilege -> {
+                        Set<String> mappedPrivileges =
+                            rangerAuthPlugin.translatePrivilege(gravitinoPrivilege.name());
+
+                        boolean match =
+                            policy.getPolicyItems().stream()
+                                .filter(
+                                    policyItem -> {
+                                      // Filter Ranger policy item by Gravitino privilege
+                                      return policyItem.getAccesses().stream()
+                                          .anyMatch(
+                                              access -> {
+                                                return mappedPrivileges.contains(access.getType());
+                                              });
+                                    })
+                                .allMatch(
+                                    policyItem -> {
+                                      // Verify role name in Ranger policy item
+                                      return policyItem.getRoles().contains(role.name());
+                                    });
+                        Assertions.assertTrue(match);
+                      });
+            });
+  }
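+
+  // Typical calls from RangerHiveIT: after granting role1 to user1,
+  //   verifyRoleInRanger(rangerAuthPlugin, role1, Lists.newArrayList(userName1));
+  // and after revoking it again,
+  //   verifyRoleInRanger(rangerAuthPlugin, role1, null, Lists.newArrayList(userName1));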
+
+  protected static void verifyRoleInRanger(RangerAuthorizationPlugin rangerAuthPlugin, Role role) {
+    RangerITEnv.verifyRoleInRanger(rangerAuthPlugin, role, null, null, null, null);
+  }
+
+  protected static void verifyRoleInRanger(
+      RangerAuthorizationPlugin rangerAuthPlugin,
+      Role role,
+      List<String> includeRolesInPolicyItem) {
+    RangerITEnv.verifyRoleInRanger(
+        rangerAuthPlugin, role, includeRolesInPolicyItem, null, null, null);
+  }
+
+  protected static void verifyRoleInRanger(
+      RangerAuthorizationPlugin rangerAuthPlugin,
+      Role role,
+      List<String> includeRolesInPolicyItem,
+      List<String> excludeRolesInPolicyItem) {
+    RangerITEnv.verifyRoleInRanger(
+        rangerAuthPlugin, role, includeRolesInPolicyItem, excludeRolesInPolicyItem, null, null);
+  }
+
+  protected static void verifyRoleInRanger(
+      RangerAuthorizationPlugin rangerAuthPlugin,
+      Role role,
+      List<String> includeUsers,
+      List<String> excludeUsers,
+      List<String> includeGroups) {
+    RangerITEnv.verifyRoleInRanger(
+        rangerAuthPlugin, role, includeUsers, excludeUsers, includeGroups, null);
+  }
+
   protected static void updateOrCreateRangerPolicy(
       String type,
       String serviceName,
diff --git a/build.gradle.kts b/build.gradle.kts
index 1abcb7a4c88..c19ee9945d0 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -543,7 +543,7 @@ tasks {
   val outputDir = projectDir.dir("distribution")
 
   val compileDistribution by registering {
-    dependsOn("copySubprojectDependencies", "copyCatalogLibAndConfigs", "copySubprojectLib", "iceberg:iceberg-rest-server:copyLibAndConfigs")
+    dependsOn("copySubprojectDependencies", "copyCatalogLibAndConfigs", "copyAuthorizationLibAndConfigs", "copySubprojectLib", "iceberg:iceberg-rest-server:copyLibAndConfigs")
 
     group = "gravitino distribution"
     outputs.dir(projectDir.dir("distribution/package"))
@@ -713,6 +713,7 @@ tasks {
   register("copySubprojectDependencies", Copy::class) {
     subprojects.forEach() {
       if (!it.name.startsWith("catalog") &&
!it.name.startsWith("authorization") && !it.name.startsWith("client") && !it.name.startsWith("filesystem") && !it.name.startsWith("spark") && !it.name.startsWith("iceberg") && it.name != "trino-connector" && it.name != "integration-test" && it.name != "bundled-catalog" && it.name != "flink-connector" ) { @@ -726,20 +727,16 @@ tasks { subprojects.forEach() { if (!it.name.startsWith("catalog") && !it.name.startsWith("client") && + !it.name.startsWith("authorization") && !it.name.startsWith("filesystem") && !it.name.startsWith("spark") && !it.name.startsWith("iceberg") && !it.name.startsWith("integration-test") && - it.name != "authorizations" && it.name != "trino-connector" && it.name != "bundled-catalog" && it.name != "flink-connector" ) { - if (it.name.startsWith("authorization-")) { - dependsOn(":authorizations:${it.name}:build") - } else { - dependsOn("${it.name}:build") - } + dependsOn("${it.name}:build") from("${it.name}/build/libs") into("distribution/package/libs") include("*.jar") @@ -757,7 +754,13 @@ tasks { ":catalogs:catalog-jdbc-mysql:copyLibAndConfig", ":catalogs:catalog-jdbc-postgresql:copyLibAndConfig", ":catalogs:catalog-hadoop:copyLibAndConfig", - "catalogs:catalog-kafka:copyLibAndConfig" + ":catalogs:catalog-kafka:copyLibAndConfig" + ) + } + + register("copyAuthorizationLibAndConfigs", Copy::class) { + dependsOn( + ":authorizations:authorization-ranger:copyLibAndConfig" ) } diff --git a/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java b/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java index 915417f8ba2..ca0275beb06 100644 --- a/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java +++ b/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java @@ -23,6 +23,7 @@ import static org.apache.gravitino.connector.PropertyEntry.stringPropertyEntry; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import java.util.List; import java.util.Map; @@ -94,7 +95,11 @@ public class JdbcCatalogPropertiesMetadata extends BaseCatalogPropertiesMetadata JdbcConfig.POOL_MAX_SIZE.getDefaultValue(), true /* hidden */, false /* reserved */)); - PROPERTIES_METADATA = Maps.uniqueIndex(propertyEntries, PropertyEntry::getName); + PROPERTIES_METADATA = + ImmutableMap.>builder() + .putAll(BASIC_CATALOG_PROPERTY_ENTRIES) + .putAll(Maps.uniqueIndex(propertyEntries, PropertyEntry::getName)) + .build(); } @Override diff --git a/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java b/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java index caff199423a..00193a37aa7 100644 --- a/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java +++ b/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java @@ -18,7 +18,7 @@ */ package org.apache.gravitino.catalog.kafka; -import java.util.Collections; +import com.google.common.collect.ImmutableMap; import java.util.Map; import org.apache.gravitino.connector.BaseCatalogPropertiesMetadata; import org.apache.gravitino.connector.PropertyEntry; @@ -30,13 +30,17 @@ public class KafkaCatalogPropertiesMetadata extends BaseCatalogPropertiesMetadat public 
+    PROPERTIES_METADATA =
+        ImmutableMap.<String, PropertyEntry<?>>builder()
+            .putAll(BASIC_CATALOG_PROPERTY_ENTRIES)
+            .putAll(Maps.uniqueIndex(propertyEntries, PropertyEntry::getName))
+            .build();
   }
 
   @Override
diff --git a/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java b/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
index caff199423a..00193a37aa7 100644
--- a/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
+++ b/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
@@ -18,7 +18,7 @@
  */
 package org.apache.gravitino.catalog.kafka;
 
-import java.util.Collections;
+import com.google.common.collect.ImmutableMap;
 import java.util.Map;
 import org.apache.gravitino.connector.BaseCatalogPropertiesMetadata;
 import org.apache.gravitino.connector.PropertyEntry;
@@ -30,13 +30,17 @@ public class KafkaCatalogPropertiesMetadat
   public static final String BOOTSTRAP_SERVERS = "bootstrap.servers";
 
   private static final Map<String, PropertyEntry<?>> KAFKA_CATALOG_PROPERTY_ENTRIES =
-      Collections.singletonMap(
-          BOOTSTRAP_SERVERS,
-          PropertyEntry.stringRequiredPropertyEntry(
+      ImmutableMap.<String, PropertyEntry<?>>builder()
+          .put(
               BOOTSTRAP_SERVERS,
-              "The Kafka broker(s) to connect to, allowing for multiple brokers by comma-separating them",
-              false /* immutable */,
-              false /* hidden */));
+              PropertyEntry.stringRequiredPropertyEntry(
+                  BOOTSTRAP_SERVERS,
+                  "The Kafka broker(s) to connect to, allowing for multiple brokers by "
+                      + "comma-separating them",
+                  false /* immutable */,
+                  false /* hidden */))
+          .putAll(BASIC_CATALOG_PROPERTY_ENTRIES)
+          .build();
 
   @Override
   protected Map<String, PropertyEntry<?>> specificPropertyEntries() {
diff --git a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
index 66e0ea385d7..f351a3271fe 100644
--- a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
+++ b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
@@ -39,6 +39,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.time.Instant;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -52,6 +53,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.gravitino.Catalog;
 import org.apache.gravitino.CatalogChange;
@@ -701,14 +703,17 @@ private BaseCatalog createBaseCatalog(IsolatedClassLoader classLoader, Catalo
     // Load Catalog class instance
     BaseCatalog<?> catalog = createCatalogInstance(classLoader, entity.getProvider());
     catalog.withCatalogConf(entity.getProperties()).withCatalogEntity(entity);
+    catalog.initAuthorizationPluginInstance(classLoader);
     return catalog;
   }
 
   private IsolatedClassLoader createClassLoader(String provider, Map<String, String> conf) {
     if (config.get(Configs.CATALOG_LOAD_ISOLATED)) {
-      String pkgPath = buildPkgPath(conf, provider);
-      String confPath = buildConfPath(conf, provider);
-      return IsolatedClassLoader.buildClassLoader(Lists.newArrayList(pkgPath, confPath));
+      String catalogPkgPath = buildPkgPath(conf, provider);
+      String catalogConfPath = buildConfPath(conf, provider);
+      ArrayList<String> libAndResourcesPaths = Lists.newArrayList(catalogPkgPath, catalogConfPath);
+      buildAuthorizationPkgPath(conf).ifPresent(libAndResourcesPaths::add);
+      return IsolatedClassLoader.buildClassLoader(libAndResourcesPaths);
     } else {
       // This will use the current class loader, it is mainly used for test.
       return new IsolatedClassLoader(
@@ -824,6 +829,37 @@ private String buildPkgPath(Map<String, String> conf, String provider) {
     return pkgPath;
   }
 
+  private Optional<String> buildAuthorizationPkgPath(Map<String, String> conf) {
+    String gravitinoHome = System.getenv("GRAVITINO_HOME");
+    Preconditions.checkArgument(gravitinoHome != null, "GRAVITINO_HOME not set");
+    boolean testEnv = System.getenv("GRAVITINO_TEST") != null;
+
+    String authorizationProvider = conf.get(Catalog.AUTHORIZATION_PROVIDER);
+    if (StringUtils.isBlank(authorizationProvider)) {
+      return Optional.empty();
+    }
+
+    String pkgPath;
+    if (testEnv) {
+      // In tests, the authorization package is under the build directory.
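+      // e.g. $GRAVITINO_HOME/authorizations/authorization-ranger/build/libs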
+      pkgPath =
+          String.join(
+              File.separator,
+              gravitinoHome,
+              "authorizations",
+              "authorization-" + authorizationProvider,
+              "build",
+              "libs");
+    } else {
+      // In a real environment, the authorization package is under the authorization directory.
+      pkgPath =
+          String.join(
+              File.separator, gravitinoHome, "authorizations", authorizationProvider, "libs");
+    }
+
+    return Optional.of(pkgPath);
+  }
+
   private Class<? extends CatalogProvider> lookupCatalogProvider(String provider, ClassLoader cl) {
     ServiceLoader<CatalogProvider> loader = ServiceLoader.load(CatalogProvider.class, cl);
diff --git a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
index 45a7b909850..bb3c2f9bd7f 100644
--- a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
+++ b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
@@ -38,6 +38,7 @@
 import org.apache.gravitino.connector.authorization.BaseAuthorization;
 import org.apache.gravitino.connector.capability.Capability;
 import org.apache.gravitino.meta.CatalogEntity;
+import org.apache.gravitino.utils.IsolatedClassLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -181,51 +182,52 @@ public CatalogOperations ops() {
 
   public AuthorizationPlugin getAuthorizationPlugin() {
     if (authorization == null) {
-      synchronized (this) {
-        if (authorization == null) {
-          BaseAuthorization<?> baseAuthorization = createAuthorizationPluginInstance();
-          if (baseAuthorization == null) {
-            return null;
-          }
-          authorization = baseAuthorization;
-        }
-      }
+      return null;
     }
     return authorization.plugin(provider(), this.conf);
   }
 
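+  // The plugin is now created eagerly via initAuthorizationPluginInstance() below, so a null
+  // here simply means no authorization provider was configured for this catalog.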
-  private BaseAuthorization<?> createAuthorizationPluginInstance() {
-    String authorizationProvider =
-        catalogPropertiesMetadata().containsProperty(AUTHORIZATION_PROVIDER)
-            ? (String) catalogPropertiesMetadata().getOrDefault(conf, AUTHORIZATION_PROVIDER)
-            : null;
+  public void initAuthorizationPluginInstance(IsolatedClassLoader classLoader) {
+    if (authorization != null) {
+      return;
+    }
+    String authorizationProvider =
+        (String) catalogPropertiesMetadata().getOrDefault(conf, AUTHORIZATION_PROVIDER);
     if (authorizationProvider == null) {
       LOG.info("Authorization provider is not set!");
-      return null;
+      return;
     }
-    ServiceLoader<AuthorizationProvider> loader =
-        ServiceLoader.load(
-            AuthorizationProvider.class, Thread.currentThread().getContextClassLoader());
-
-    List<Class<? extends AuthorizationProvider>> providers =
-        Streams.stream(loader.iterator())
-            .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider))
-            .map(AuthorizationProvider::getClass)
-            .collect(Collectors.toList());
-    if (providers.isEmpty()) {
-      throw new IllegalArgumentException(
-          "No authorization provider found for: " + authorizationProvider);
-    } else if (providers.size() > 1) {
-      throw new IllegalArgumentException(
-          "Multiple authorization providers found for: " + authorizationProvider);
-    }
     try {
-      return (BaseAuthorization)
-          Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance();
+      authorization =
+          classLoader.withClassLoader(
+              cl -> {
+                try {
+                  ServiceLoader<AuthorizationProvider> loader =
+                      ServiceLoader.load(AuthorizationProvider.class, cl);
+
+                  List<Class<? extends AuthorizationProvider>> providers =
+                      Streams.stream(loader.iterator())
+                          .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider))
+                          .map(AuthorizationProvider::getClass)
+                          .collect(Collectors.toList());
+                  if (providers.isEmpty()) {
+                    throw new IllegalArgumentException(
+                        "No authorization provider found for: " + authorizationProvider);
+                  } else if (providers.size() > 1) {
+                    throw new IllegalArgumentException(
+                        "Multiple authorization providers found for: " + authorizationProvider);
+                  }
+                  return (BaseAuthorization)
+                      Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance();
+                } catch (Exception e) {
+                  LOG.error("Failed to create authorization instance", e);
+                  throw new RuntimeException(e);
+                }
+              });
     } catch (Exception e) {
-      LOG.error("Failed to create authorization instance", e);
+      LOG.error("Failed to load authorization with class loader", e);
       throw new RuntimeException(e);
     }
   }
diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
index 8ccf3a2de91..554ef0cec8b 100644
--- a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
+++ b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
@@ -20,6 +20,7 @@
 
 import com.google.common.collect.ImmutableMap;
 import java.time.Instant;
+import java.util.Collections;
 import org.apache.gravitino.Catalog;
 import org.apache.gravitino.Namespace;
 import org.apache.gravitino.TestCatalog;
@@ -27,6 +28,7 @@
 import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationPlugin;
 import org.apache.gravitino.meta.AuditInfo;
 import org.apache.gravitino.meta.CatalogEntity;
+import org.apache.gravitino.utils.IsolatedClassLoader;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -54,6 +56,10 @@ public static void setUp() throws Exception {
         new TestCatalog()
             .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "ranger"))
             .withCatalogEntity(hiveCatalogEntity);
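+    // The plugin must now be initialized explicitly through an isolated class loader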
+    IsolatedClassLoader isolatedClassLoader =
+        new IsolatedClassLoader(
+            Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
+    hiveCatalog.initAuthorizationPluginInstance(isolatedClassLoader);
 
     CatalogEntity mySQLEntity =
         CatalogEntity.builder()
@@ -69,6 +75,7 @@ public static void setUp() throws Exception {
         new TestCatalog()
             .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "mysql"))
             .withCatalogEntity(mySQLEntity);
+    mySQLCatalog.initAuthorizationPluginInstance(isolatedClassLoader);
   }
 
   @Test
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index d9d0ab3cc50..8cc2b0f4813 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -79,6 +79,7 @@ flink = "1.18.0"
 cglib = "2.2"
 ranger = "2.4.0"
 javax-jaxb-api = "2.3.1"
+javax-ws-rs-api = "2.1.1"
 protobuf-plugin = "0.9.2"
 spotless-plugin = '6.11.0'
 gradle-extensions-plugin = '1.74'
@@ -196,6 +197,7 @@ cglib = { group = "cglib", name = "cglib", version.ref = "cglib"}
 ranger-intg = { group = "org.apache.ranger", name = "ranger-intg", version.ref = "ranger" }
 javax-jaxb-api = { group = "javax.xml.bind", name = "jaxb-api", version.ref = "javax-jaxb-api" }
+javax-ws-rs-api = { group = "javax.ws.rs", name = "javax.ws.rs-api", version.ref = "javax-ws-rs-api" }
 selenium = { group = "org.seleniumhq.selenium", name = "selenium-java", version.ref = "selenium" }
 rauschig = { group = "org.rauschig", name = "jarchivelib", version.ref = "rauschig" }
 mybatis = { group = "org.mybatis", name = "mybatis", version.ref = "mybatis"}
diff --git a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
index 036235e4a93..cc5cd53ab5a 100644
--- a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
+++ b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
@@ -129,7 +129,7 @@ public void createDatabase(TestDatabaseName testDatabaseName) {
       statement.execute(query);
       LOG.info(String.format("MySQL container database %s has been created", testDatabaseName));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      throw new RuntimeException("Failed to create database", e);
     }
   }
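
For reference, the pieces above combine into the following initialization flow. A minimal
sketch in Java, reusing only names that appear in this patch (TestCatalog, hiveCatalogEntity
and isolatedClassLoader come from the TestAuthorization setup; "ranger" is the provider
short name this patch wires up):

    // Configure a catalog with an authorization provider ...
    hiveCatalog =
        new TestCatalog()
            .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "ranger"))
            .withCatalogEntity(hiveCatalogEntity);

    // ... then create the plugin explicitly; the provider class is discovered via
    // ServiceLoader inside the supplied isolated class loader.
    hiveCatalog.initAuthorizationPluginInstance(isolatedClassLoader);

    // getAuthorizationPlugin() no longer instantiates anything lazily; it returns null
    // when no provider was configured.
    AuthorizationPlugin plugin = hiveCatalog.getAuthorizationPlugin();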