diff --git a/authorizations/authorization-ranger/build.gradle.kts b/authorizations/authorization-ranger/build.gradle.kts
index 0eeb6c4fb5b..bffd92b53b9 100644
--- a/authorizations/authorization-ranger/build.gradle.kts
+++ b/authorizations/authorization-ranger/build.gradle.kts
@@ -30,6 +30,7 @@ val kyuubiVersion: String = libs.versions.kyuubi4paimon.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
 val icebergVersion: String = libs.versions.iceberg4spark.get()
 val paimonVersion: String = libs.versions.paimon.get()
+val hudiVersion = libs.versions.hudi.get()
 
 dependencies {
   implementation(project(":api")) {
@@ -78,8 +79,13 @@ dependencies {
   testRuntimeOnly(libs.junit.jupiter.engine)
   testImplementation(libs.mysql.driver)
   testImplementation(libs.postgresql.driver)
-  testImplementation(libs.postgresql.driver)
-  testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion")
+  testImplementation(libs.prometheus.dropwizard)
+  testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") {
+    exclude("org.apache.hadoop")
+    exclude("io.dropwizard.metrics")
+    exclude("com.fasterxml.jackson.core")
+    exclude("com.fasterxml.jackson.module", "jackson-module-scala_2.12")
+  }
   testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion") {
     exclude("org.apache.avro")
     exclude("org.apache.hadoop")
@@ -102,6 +108,9 @@ dependencies {
   }
   testImplementation("org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_$scalaVersion:$icebergVersion")
   testImplementation("org.apache.paimon:paimon-spark-$sparkMajorVersion:$paimonVersion")
+  testRuntimeOnly("org.apache.hudi:hudi-spark$sparkMajorVersion-bundle_$scalaVersion:$hudiVersion") {
+    exclude("*")
+  }
 }
 
 tasks {
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
index 459b6b04720..070e07b1456 100644
--- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
@@ -35,6 +35,7 @@ protected AuthorizationPlugin newPlugin(String catalogProvider, Map<String, String> config) {
[...]
+    Map<String, String> configs = Maps.newHashMap();
+    configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
+    configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
+    configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
+    configs.put("SimpleAuthUserName", AuthConstants.ANONYMOUS_USER);
+    registerCustomConfigs(configs);
+    super.startIntegrationTest();
+
+    RangerITEnv.init();
+    RangerITEnv.startHiveRangerContainer();
+
+    RANGER_ADMIN_URL =
+        String.format(
+            "http://%s:%d",
+            containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT);
+
+    HIVE_METASTORE_URIS =
+        String.format(
+            "thrift://%s:%d",
+            containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+            HiveContainer.HIVE_METASTORE_PORT);
+
+    generateRangerSparkSecurityXML();
+
+    sparkSession =
+        SparkSession.builder()
+            .master("local[1]")
+            .appName("Hudi Catalog integration test")
+            .config("hive.metastore.uris", HIVE_METASTORE_URIS)
+            .config(
+                "spark.sql.warehouse.dir",
+                String.format(
+                    "hdfs://%s:%d/user/hive/warehouse-catalog-hudi",
+                    containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+                    HiveContainer.HDFS_DEFAULTFS_PORT))
+            .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+            .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
+            .config(
+                "spark.sql.catalog.spark_catalog",
+                "org.apache.spark.sql.hudi.catalog.HoodieCatalog")
+            .config("spark.kryo.registrator", "org.apache.spark.HoodieSparkKryoRegistrar")
+            .config("dfs.replication", "1")
+            .enableHiveSupport()
+            .getOrCreate();
+
+    createMetalake();
+    createCatalog();
+
+    metalake.addUser(System.getenv(HADOOP_USER_NAME));
+  }
+
+  @AfterAll
+  public void stop() {
+    //
+  }
+
+  void createMetalake() {
+    GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
+    Assertions.assertEquals(0, gravitinoMetalakes.length);
+
+    client.createMetalake(metalakeName, "comment", Collections.emptyMap());
+    GravitinoMetalake loadMetalake = client.loadMetalake(metalakeName);
+    Assertions.assertEquals(metalakeName, loadMetalake.name());
+
+    metalake = loadMetalake;
+  }
+
+  private static void createCatalog() {
+    Map<String, String> properties =
+        ImmutableMap.of(
+            "uri",
+            HIVE_METASTORE_URIS,
+            "catalog-backend",
+            "hms",
+            AUTHORIZATION_PROVIDER,
+            "ranger",
+            RANGER_SERVICE_NAME,
+            RangerITEnv.RANGER_HIVE_REPO_NAME,
+            AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+            RANGER_ADMIN_URL,
+            RANGER_AUTH_TYPE,
+            RangerContainer.authType,
+            RANGER_USERNAME,
+            RangerContainer.rangerUserName,
+            RANGER_PASSWORD,
+            RangerContainer.rangerPassword);
+
+    metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
+    catalog = metalake.loadCatalog(catalogName);
+    LOG.info("Catalog created: {}", catalog);
+  }
+}
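
The test setup above wires Ranger, the Hive Metastore, and a local Spark session with the Hudi extensions, then creates a Gravitino metalake and catalog against that stack. For context, a minimal sketch of a test case that could be built on this harness, assuming the sparkSession configured above; the database and table names are hypothetical and not part of this diff:

    // Sketch only: hudi_db and example_tab are made-up names for illustration.
    // DDL and DML go through HoodieCatalog, which the session above registers
    // as spark_catalog, so the table below is created as a Hudi table.
    sparkSession.sql("CREATE DATABASE IF NOT EXISTS hudi_db");
    sparkSession.sql("CREATE TABLE hudi_db.example_tab (id INT, name STRING) USING hudi");
    sparkSession.sql("INSERT INTO hudi_db.example_tab VALUES (1, 'a')");
    sparkSession.sql("SELECT * FROM hudi_db.example_tab").show();
    // With ENABLE_AUTHORIZATION set and the ranger-spark-security.xml generated
    // above, the same statements would be expected to fail for a user who has
    // not been granted the matching privileges in Ranger.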