diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java
new file mode 100644
index 00000000000..edaa375747a
--- /dev/null
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+/**
+ * The properties for Chain authorization plugin.
+ *
+ * Configuration Example:
+ * "authorization.chain.plugins" = "hive1,hdfs1"
+ * "authorization.chain.hive1.provider" = "ranger";
+ * "authorization.chain.hive1.ranger.service.type" = "HadoopSQL";
+ * "authorization.chain.hive1.ranger.service.name" = "hiveDev";
+ * "authorization.chain.hive1.ranger.auth.type" = "simple";
+ * "authorization.chain.hive1.ranger.admin.url" = "http://localhost:6080";
+ * "authorization.chain.hive1.ranger.username" = "admin";
+ * "authorization.chain.hive1.ranger.password" = "admin";
+ * "authorization.chain.hdfs1.provider" = "ranger";
+ * "authorization.chain.hdfs1.ranger.service.type" = "HDFS";
+ * "authorization.chain.hdfs1.ranger.service.name" = "hdfsDev";
+ * "authorization.chain.hdfs1.ranger.auth.type" = "simple";
+ * "authorization.chain.hdfs1.ranger.admin.url" = "http://localhost:6080";
+ * "authorization.chain.hdfs1.ranger.username" = "admin";
+ * "authorization.chain.hdfs1.ranger.password" = "admin";
+ */
+public class ChainAuthorizationProperties {
+ public static final String PLUGINS_SPLITTER = ",";
+ /** Chain authorization plugin names */
+ public static final String CHAIN_PLUGINS_PROPERTIES_KEY = "authorization.chain.plugins";
+
+ /** Chain authorization plugin provider */
+ public static final String CHAIN_PROVIDER = "authorization.chain.*.provider";
+
+ static Map fetchAuthPluginProperties(
+ String pluginName, Map properties) {
+ Preconditions.checkArgument(
+ properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY)
+ && properties.get(CHAIN_PLUGINS_PROPERTIES_KEY) != null,
+ String.format("%s is required", CHAIN_PLUGINS_PROPERTIES_KEY));
+
+ String[] pluginNames = properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER);
+ Preconditions.checkArgument(
+ Arrays.asList(pluginNames).contains(pluginName),
+ String.format("pluginName %s must be one of %s", pluginName, Arrays.toString(pluginNames)));
+
+ String regex = "^authorization\\.chain\\.(" + pluginName + ")\\..*";
+ Pattern pattern = Pattern.compile(regex);
+
+ Map filteredProperties = new HashMap<>();
+ for (Map.Entry entry : properties.entrySet()) {
+ Matcher matcher = pattern.matcher(entry.getKey());
+ if (matcher.matches()) {
+ filteredProperties.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ String removeRegex = "^authorization\\.chain\\.(" + pluginName + ")\\.";
+ Pattern removePattern = Pattern.compile(removeRegex);
+
+ Map resultProperties = new HashMap<>();
+ for (Map.Entry entry : filteredProperties.entrySet()) {
+ Matcher removeMatcher = removePattern.matcher(entry.getKey());
+ if (removeMatcher.find()) {
+ resultProperties.put(removeMatcher.replaceFirst("authorization."), entry.getValue());
+ }
+ }
+
+ return resultProperties;
+ }
+
+ public static void validate(Map properties) {
+ Preconditions.checkArgument(
+ properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY),
+ String.format("%s is required", CHAIN_PLUGINS_PROPERTIES_KEY));
+ List pluginNames =
+ Arrays.stream(properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER))
+ .map(String::trim)
+ .collect(Collectors.toList());
+ Preconditions.checkArgument(
+ !pluginNames.isEmpty(),
+ String.format("%s must have at least one plugin name", CHAIN_PLUGINS_PROPERTIES_KEY));
+ Preconditions.checkArgument(
+ pluginNames.size() == pluginNames.stream().distinct().count(),
+ "Duplicate plugin name in %s: %s",
+ CHAIN_PLUGINS_PROPERTIES_KEY,
+ pluginNames);
+ pluginNames.stream()
+ .filter(v -> v.contains("."))
+ .forEach(
+ v -> {
+ throw new IllegalArgumentException(
+ String.format(
+ "Plugin name cannot be contain `.` character in the `%s = %s`.",
+ CHAIN_PLUGINS_PROPERTIES_KEY, properties.get(CHAIN_PLUGINS_PROPERTIES_KEY)));
+ });
+
+ Pattern pattern = Pattern.compile("^authorization\\.chain\\..*\\..*$");
+ Map filteredProperties =
+ properties.entrySet().stream()
+ .filter(entry -> pattern.matcher(entry.getKey()).matches())
+ .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+
+ String pluginNamesPattern = String.join("|", pluginNames);
+ Pattern patternPluginNames =
+ Pattern.compile("^authorization\\.chain\\.(" + pluginNamesPattern + ")\\..*$");
+ for (String key : filteredProperties.keySet()) {
+ Matcher matcher = patternPluginNames.matcher(key);
+ Preconditions.checkArgument(
+ matcher.matches(),
+ "The key %s does not match the pattern %s",
+ key,
+ patternPluginNames.pattern());
+ }
+
+ // Generate regex patterns from wildcardProperties
+ List wildcardProperties = ImmutableList.of(CHAIN_PROVIDER);
+ for (String pluginName : pluginNames) {
+ List patterns =
+ wildcardProperties.stream()
+ .map(wildcard -> "^" + wildcard.replace("*", pluginName) + "$")
+ .map(Pattern::compile)
+ .collect(Collectors.toList());
+ // Validate properties keys
+ for (Pattern pattern1 : patterns) {
+ boolean matches =
+ filteredProperties.keySet().stream().anyMatch(key -> pattern1.matcher(key).matches());
+ Preconditions.checkArgument(
+ matches,
+ "Missing required properties %s for plugin: %s",
+ filteredProperties,
+ pattern1.pattern());
+ }
+ }
+ }
+}
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
index 04c40e219ef..cd27d9f12a2 100644
--- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
@@ -18,6 +18,10 @@
 */
package org.apache.gravitino.authorization.ranger;
+import static org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties.RANGER_SERVICE_TYPE;
+
+import com.google.common.base.Preconditions;
+import java.util.Locale;
import java.util.Map;
import org.apache.gravitino.connector.authorization.AuthorizationPlugin;
import org.apache.gravitino.connector.authorization.BaseAuthorization;
@@ -31,16 +34,18 @@ public String shortName() {
  @Override
  protected AuthorizationPlugin newPlugin(
-      String metalake, String catalogProvider, Map<String, String> config) {
-    switch (catalogProvider) {
-      case "hive":
-      case "lakehouse-iceberg":
-      case "lakehouse-paimon":
-        return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, config);
-      case "hadoop":
-        return RangerAuthorizationHDFSPlugin.getInstance(metalake, config);
+      String metalake, String catalogProvider, Map<String, String> properties) {
+    Preconditions.checkArgument(
+        properties.get(RANGER_SERVICE_TYPE) != null,
+        String.format("%s is required", RANGER_SERVICE_TYPE));
+    String serviceType = properties.get(RANGER_SERVICE_TYPE).toUpperCase(Locale.ROOT);
+    switch (serviceType) {
+      case "HADOOPSQL":
+        return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, properties);
+      case "HDFS":
+        return RangerAuthorizationHDFSPlugin.getInstance(metalake, properties);
      default:
-        throw new IllegalArgumentException("Unknown catalog provider: " + catalogProvider);
+        throw new IllegalArgumentException("Unsupported service type: " + serviceType);
    }
  }
}
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
index a3ce047aa5b..9c30ee11906 100644
--- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
@@ -52,7 +52,6 @@
import org.apache.gravitino.authorization.ranger.reference.VXGroupList;
import org.apache.gravitino.authorization.ranger.reference.VXUser;
import org.apache.gravitino.authorization.ranger.reference.VXUserList;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.connector.authorization.AuthorizationPlugin;
import org.apache.gravitino.exceptions.AuthorizationPluginException;
import org.apache.gravitino.meta.AuditInfo;
@@ -88,17 +87,13 @@ public abstract class RangerAuthorizationPlugin
protected RangerAuthorizationPlugin(String metalake, Map config) {
this.metalake = metalake;
- String rangerUrl = config.get(AuthorizationPropertiesMeta.RANGER_ADMIN_URL);
- String authType = config.get(AuthorizationPropertiesMeta.RANGER_AUTH_TYPE);
- rangerAdminName = config.get(AuthorizationPropertiesMeta.RANGER_USERNAME);
+ RangerAuthorizationProperties.validate(config);
+ String rangerUrl = config.get(RangerAuthorizationProperties.RANGER_ADMIN_URL);
+ String authType = config.get(RangerAuthorizationProperties.RANGER_AUTH_TYPE);
+ rangerAdminName = config.get(RangerAuthorizationProperties.RANGER_USERNAME);
// Apache Ranger Password should be minimum 8 characters with min one alphabet and one numeric.
- String password = config.get(AuthorizationPropertiesMeta.RANGER_PASSWORD);
- rangerServiceName = config.get(AuthorizationPropertiesMeta.RANGER_SERVICE_NAME);
- Preconditions.checkArgument(rangerUrl != null, "Ranger admin URL is required");
- Preconditions.checkArgument(authType != null, "Ranger auth type is required");
- Preconditions.checkArgument(rangerAdminName != null, "Ranger username is required");
- Preconditions.checkArgument(password != null, "Ranger password is required");
- Preconditions.checkArgument(rangerServiceName != null, "Ranger service name is required");
+ String password = config.get(RangerAuthorizationProperties.RANGER_PASSWORD);
+ rangerServiceName = config.get(RangerAuthorizationProperties.RANGER_SERVICE_NAME);
rangerClient = new RangerClientExtension(rangerUrl, authType, rangerAdminName, password);
rangerHelper =
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java
new file mode 100644
index 00000000000..e7fee3088f6
--- /dev/null
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import com.google.common.base.Preconditions;
+import java.util.Map;
+
+/** The properties for Ranger authorization plugin. */
+public class RangerAuthorizationProperties {
+ /** Ranger admin web URIs */
+ public static final String RANGER_ADMIN_URL = "authorization.ranger.admin.url";
+
+ /** Ranger service type */
+ public static final String RANGER_SERVICE_TYPE = "authorization.ranger.service.type";
+
+ /** Ranger service name */
+ public static final String RANGER_SERVICE_NAME = "authorization.ranger.service.name";
+
+ /** Ranger authentication type kerberos or simple */
+ public static final String RANGER_AUTH_TYPE = "authorization.ranger.auth.type";
+
+ /**
+ * Ranger admin web login username(auth_type=simple), or kerberos principal(auth_type=kerberos)
+ */
+ public static final String RANGER_USERNAME = "authorization.ranger.username";
+
+ /**
+ * Ranger admin web login user password(auth_type=simple), or path of the keytab
+ * file(auth_type=kerberos)
+ */
+ public static final String RANGER_PASSWORD = "authorization.ranger.password";
+
+ public static void validate(Map properties) {
+ Preconditions.checkArgument(
+ properties.containsKey(RANGER_ADMIN_URL),
+ String.format("%s is required", RANGER_ADMIN_URL));
+ Preconditions.checkArgument(
+ properties.containsKey(RANGER_SERVICE_TYPE),
+ String.format("%s is required", RANGER_SERVICE_TYPE));
+ Preconditions.checkArgument(
+ properties.containsKey(RANGER_SERVICE_NAME),
+ String.format("%s is required", RANGER_SERVICE_NAME));
+ Preconditions.checkArgument(
+ properties.containsKey(RANGER_AUTH_TYPE),
+ String.format("%s is required", RANGER_AUTH_TYPE));
+ Preconditions.checkArgument(
+ properties.containsKey(RANGER_USERNAME), String.format("%s is required", RANGER_USERNAME));
+ Preconditions.checkArgument(
+ properties.containsKey(RANGER_PASSWORD), String.format("%s is required", RANGER_PASSWORD));
+ Preconditions.checkArgument(
+ properties.get(RANGER_ADMIN_URL) != null,
+ String.format("%s is required", RANGER_ADMIN_URL));
+ Preconditions.checkArgument(
+ properties.get(RANGER_SERVICE_NAME) != null,
+ String.format("%s is required", RANGER_SERVICE_NAME));
+ Preconditions.checkArgument(
+ properties.get(RANGER_AUTH_TYPE) != null,
+ String.format("%s is required", RANGER_AUTH_TYPE));
+ Preconditions.checkArgument(
+ properties.get(RANGER_USERNAME) != null, String.format("%s is required", RANGER_USERNAME));
+ Preconditions.checkArgument(
+ properties.get(RANGER_PASSWORD) != null, String.format("%s is required", RANGER_PASSWORD));
+ }
+}
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java
new file mode 100644
index 00000000000..5d19f234093
--- /dev/null
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
+import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
+
+import com.google.common.collect.Maps;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.gravitino.catalog.hive.HiveConstants;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+public class TestChainAuthorizationProperties {
+ @Test
+ void testChainOnePlugin() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.chain.plugins", "hive1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testChainTwoPlugins() {
+ Map properties = new HashMap<>();
+ properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083");
+ properties.put("gravitino.bypass.hive.metastore.client.capability.check", "true");
+ properties.put(IMPERSONATION_ENABLE, "true");
+ properties.put(AUTHORIZATION_PROVIDER, "chain");
+ properties.put("authorization.chain.plugins", "hive1,hdfs1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+ Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testPluginsHasSpace() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.chain.plugins", "hive1, hdfs1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+ Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testPluginsOneButHasTowPluginConfig() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.chain.plugins", "hive1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testPluginsHasPoint() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.chain.plugins", "hive.1,hdfs1");
+ properties.put("authorization.chain.hive.1.provider", "ranger");
+ properties.put("authorization.chain.hive.1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive.1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive.1.ranger.username", "admin");
+ properties.put("authorization.chain.hive.1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive.1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testErrorPluginName() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.chain.plugins", "hive1,hdfs1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.plug3.ranger.service.name", "hdfsDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testDuplicationPluginName() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.chain.plugins", "hive1,hive1,hdfs1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testFetchRangerPrpoerties() {
+ Map properties = new HashMap<>();
+ properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083");
+ properties.put("gravitino.bypass.hive.metastore.client.capability.check", "true");
+ properties.put(IMPERSONATION_ENABLE, "true");
+ properties.put(AUTHORIZATION_PROVIDER, "chain");
+ properties.put("authorization.chain.plugins", "hive1,hdfs1");
+ properties.put("authorization.chain.hive1.provider", "ranger");
+ properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hive1.ranger.username", "admin");
+ properties.put("authorization.chain.hive1.ranger.password", "admin");
+ properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+ properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+ properties.put("authorization.chain.hdfs1.provider", "ranger");
+ properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+ properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+ properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+ properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+
+ Map rangerHiveProperties =
+ ChainAuthorizationProperties.fetchAuthPluginProperties("hive1", properties);
+ Assertions.assertDoesNotThrow(
+ () -> RangerAuthorizationProperties.validate(rangerHiveProperties));
+
+ Map rangerHDFSProperties =
+ ChainAuthorizationProperties.fetchAuthPluginProperties("hdfs1", properties);
+ Assertions.assertDoesNotThrow(
+ () -> RangerAuthorizationProperties.validate(rangerHDFSProperties));
+ }
+}
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java
new file mode 100644
index 00000000000..a90b164a21f
--- /dev/null
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import com.google.common.collect.Maps;
+import java.util.Map;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+public class TestRangerAuthorizationProperties {
+ @Test
+ void testRangerProperties() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.auth.type", "simple");
+ properties.put("authorization.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.ranger.username", "admin");
+ properties.put("authorization.ranger.password", "admin");
+ properties.put("authorization.ranger.service.type", "hive");
+ properties.put("authorization.ranger.service.name", "hiveDev");
+ Assertions.assertDoesNotThrow(() -> RangerAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testRangerPropertiesLoseAuthType() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.ranger.username", "admin");
+ properties.put("authorization.ranger.password", "admin");
+ properties.put("authorization.ranger.service.type", "hive");
+ properties.put("authorization.ranger.service.name", "hiveDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testRangerPropertiesLoseAdminUrl() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.auth.type", "simple");
+ properties.put("authorization.ranger.username", "admin");
+ properties.put("authorization.ranger.password", "admin");
+ properties.put("authorization.ranger.service.type", "hive");
+ properties.put("authorization.ranger.service.name", "hiveDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testRangerPropertiesLoseUserName() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.auth.type", "simple");
+ properties.put("authorization.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.ranger.password", "admin");
+ properties.put("authorization.ranger.service.type", "hive");
+ properties.put("authorization.ranger.service.name", "hiveDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testRangerPropertiesLosePassword() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.auth.type", "simple");
+ properties.put("authorization.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.ranger.username", "admin");
+ properties.put("authorization.ranger.service.type", "hive");
+ properties.put("authorization.ranger.service.name", "hiveDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testRangerPropertiesLoseServiceType() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.auth.type", "simple");
+ properties.put("authorization.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.ranger.username", "admin");
+ properties.put("authorization.ranger.password", "admin");
+ properties.put("authorization.ranger.service.name", "hiveDev");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+ }
+
+ @Test
+ void testRangerPropertiesLoseServiceName() {
+ Map properties = Maps.newHashMap();
+ properties.put("authorization.ranger.auth.type", "simple");
+ properties.put("authorization.ranger.admin.url", "http://localhost:6080");
+ properties.put("authorization.ranger.username", "admin");
+ properties.put("authorization.ranger.password", "admin");
+ properties.put("authorization.ranger.service.type", "hive");
+ Assertions.assertThrows(
+ IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+ }
+}
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java
index bbaae32781b..56f09781587 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java
@@ -23,10 +23,6 @@
import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.rangerClient;
import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.rangerHelper;
import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
import com.google.common.collect.ImmutableMap;
@@ -49,10 +45,10 @@
import org.apache.gravitino.authorization.Privileges;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
import org.apache.gravitino.authorization.ranger.RangerHelper;
import org.apache.gravitino.authorization.ranger.RangerPrivileges;
import org.apache.gravitino.client.GravitinoMetalake;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.file.Fileset;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
@@ -540,15 +536,17 @@ private void createCatalogAndSchema() {
"true",
AUTHORIZATION_PROVIDER,
"ranger",
- RANGER_SERVICE_NAME,
+ RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+ "HDFS",
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HDFS_REPO_NAME,
- AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ RangerAuthorizationProperties.RANGER_ADMIN_URL,
RANGER_ADMIN_URL,
- RANGER_AUTH_TYPE,
+ RangerAuthorizationProperties.RANGER_AUTH_TYPE,
RangerContainer.authType,
- RANGER_USERNAME,
+ RangerAuthorizationProperties.RANGER_USERNAME,
RangerContainer.rangerUserName,
- RANGER_PASSWORD,
+ RangerAuthorizationProperties.RANGER_PASSWORD,
RangerContainer.rangerPassword));
catalog = metalake.loadCatalog(catalogName);
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
index 600463fbc21..baec9434c79 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
@@ -20,10 +20,6 @@
import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
import com.google.common.collect.ImmutableMap;
@@ -33,8 +29,8 @@
import org.apache.gravitino.Configs;
import org.apache.gravitino.auth.AuthConstants;
import org.apache.gravitino.auth.AuthenticatorType;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
import org.apache.gravitino.catalog.hive.HiveConstants;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
@@ -179,15 +175,17 @@ private static void createCatalog() {
"true",
AUTHORIZATION_PROVIDER,
"ranger",
- RANGER_SERVICE_NAME,
+ RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+ "HadoopSQL",
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HIVE_REPO_NAME,
- AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ RangerAuthorizationProperties.RANGER_ADMIN_URL,
RANGER_ADMIN_URL,
- RANGER_AUTH_TYPE,
+ RangerAuthorizationProperties.RANGER_AUTH_TYPE,
RangerContainer.authType,
- RANGER_USERNAME,
+ RangerAuthorizationProperties.RANGER_USERNAME,
RangerContainer.rangerUserName,
- RANGER_PASSWORD,
+ RangerAuthorizationProperties.RANGER_PASSWORD,
RangerContainer.rangerPassword);
metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
index f6b83bb9d1a..b3be410ea03 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
@@ -35,10 +35,10 @@
import org.apache.gravitino.authorization.ranger.RangerAuthorizationHDFSPlugin;
import org.apache.gravitino.authorization.ranger.RangerAuthorizationHadoopSQLPlugin;
import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
import org.apache.gravitino.authorization.ranger.RangerHelper;
import org.apache.gravitino.authorization.ranger.RangerPrivileges;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
@@ -98,18 +98,20 @@ public static void init(boolean allowAnyoneAccessHDFS) {
RangerAuthorizationHadoopSQLPlugin.getInstance(
"metalake",
ImmutableMap.of(
- AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ RangerAuthorizationProperties.RANGER_ADMIN_URL,
String.format(
"http://%s:%d",
containerSuite.getRangerContainer().getContainerIpAddress(),
RangerContainer.RANGER_SERVER_PORT),
- AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+ RangerAuthorizationProperties.RANGER_AUTH_TYPE,
RangerContainer.authType,
- AuthorizationPropertiesMeta.RANGER_USERNAME,
+ RangerAuthorizationProperties.RANGER_USERNAME,
RangerContainer.rangerUserName,
- AuthorizationPropertiesMeta.RANGER_PASSWORD,
+ RangerAuthorizationProperties.RANGER_PASSWORD,
RangerContainer.rangerPassword,
- AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+ RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+ "HadoopSQL",
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HIVE_REPO_NAME));
RangerAuthorizationHDFSPlugin spyRangerAuthorizationHDFSPlugin =
@@ -117,18 +119,20 @@ public static void init(boolean allowAnyoneAccessHDFS) {
RangerAuthorizationHDFSPlugin.getInstance(
"metalake",
ImmutableMap.of(
- AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ RangerAuthorizationProperties.RANGER_ADMIN_URL,
String.format(
"http://%s:%d",
containerSuite.getRangerContainer().getContainerIpAddress(),
RangerContainer.RANGER_SERVER_PORT),
- AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+ RangerAuthorizationProperties.RANGER_AUTH_TYPE,
RangerContainer.authType,
- AuthorizationPropertiesMeta.RANGER_USERNAME,
+ RangerAuthorizationProperties.RANGER_USERNAME,
RangerContainer.rangerUserName,
- AuthorizationPropertiesMeta.RANGER_PASSWORD,
+ RangerAuthorizationProperties.RANGER_PASSWORD,
RangerContainer.rangerPassword,
- AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+ RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+ "HDFS",
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HDFS_REPO_NAME)));
doReturn("/test").when(spyRangerAuthorizationHDFSPlugin).getFileSetPath(Mockito.any());
rangerAuthHDFSPlugin = spyRangerAuthorizationHDFSPlugin;
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java
index a4fc1253efe..d8bd70c6470 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java
@@ -21,16 +21,12 @@
import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
-import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.Collections;
+import java.util.HashMap;
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.Configs;
@@ -39,8 +35,8 @@
import org.apache.gravitino.authorization.Privileges;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
@@ -168,31 +164,24 @@ protected void testAlterTable() {
}
private static void createCatalog() {
- Map properties =
- ImmutableMap.of(
- IcebergConstants.URI,
- HIVE_METASTORE_URIS,
- IcebergConstants.CATALOG_BACKEND,
- "hive",
- IcebergConstants.WAREHOUSE,
- String.format(
- "hdfs://%s:%d/user/hive/warehouse",
- containerSuite.getHiveRangerContainer().getContainerIpAddress(),
- HiveContainer.HDFS_DEFAULTFS_PORT),
- IMPERSONATION_ENABLE,
- "true",
- AUTHORIZATION_PROVIDER,
- "ranger",
- RANGER_SERVICE_NAME,
- RangerITEnv.RANGER_HIVE_REPO_NAME,
- AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
- RANGER_ADMIN_URL,
- RANGER_AUTH_TYPE,
- RangerContainer.authType,
- RANGER_USERNAME,
- RangerContainer.rangerUserName,
- RANGER_PASSWORD,
- RangerContainer.rangerPassword);
+ Map properties = new HashMap<>();
+ properties.put(IcebergConstants.URI, HIVE_METASTORE_URIS);
+ properties.put(IcebergConstants.CATALOG_BACKEND, "hive");
+ properties.put(
+ IcebergConstants.WAREHOUSE,
+ String.format(
+ "hdfs://%s:%d/user/hive/warehouse",
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+ HiveContainer.HDFS_DEFAULTFS_PORT));
+ properties.put(IMPERSONATION_ENABLE, "true");
+ properties.put(AUTHORIZATION_PROVIDER, "ranger");
+ properties.put(RangerAuthorizationProperties.RANGER_SERVICE_TYPE, "HadoopSQL");
+ properties.put(
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME);
+ properties.put(RangerAuthorizationProperties.RANGER_ADMIN_URL, RANGER_ADMIN_URL);
+ properties.put(RangerAuthorizationProperties.RANGER_AUTH_TYPE, RangerContainer.authType);
+ properties.put(RangerAuthorizationProperties.RANGER_USERNAME, RangerContainer.rangerUserName);
+ properties.put(RangerAuthorizationProperties.RANGER_PASSWORD, RangerContainer.rangerPassword);
metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
catalog = metalake.loadCatalog(catalogName);
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java
index b2529837e3c..79d1eb1875d 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java
@@ -20,10 +20,6 @@
import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
import com.google.common.collect.ImmutableMap;
@@ -38,7 +34,7 @@
import org.apache.gravitino.authorization.Privileges;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
@@ -197,15 +193,17 @@ private static void createCatalog() {
HiveContainer.HDFS_DEFAULTFS_PORT),
AUTHORIZATION_PROVIDER,
"ranger",
- RANGER_SERVICE_NAME,
+ RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+ "HadoopSQL",
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HIVE_REPO_NAME,
- AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ RangerAuthorizationProperties.RANGER_ADMIN_URL,
RANGER_ADMIN_URL,
- RANGER_AUTH_TYPE,
+ RangerAuthorizationProperties.RANGER_AUTH_TYPE,
RangerContainer.authType,
- RANGER_USERNAME,
+ RangerAuthorizationProperties.RANGER_USERNAME,
RangerContainer.rangerUserName,
- RANGER_PASSWORD,
+ RangerAuthorizationProperties.RANGER_PASSWORD,
RangerContainer.rangerPassword);
metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java
index 717694e1850..98711f98ae8 100644
--- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java
+++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java
@@ -29,8 +29,8 @@
/** Implementation of an Apache Hive catalog in Apache Gravitino. */
public class HiveCatalog extends BaseCatalog {
- static final HiveCatalogPropertiesMeta CATALOG_PROPERTIES_METADATA =
- new HiveCatalogPropertiesMeta();
+ static final HiveCatalogPropertiesMetadata CATALOG_PROPERTIES_METADATA =
+ new HiveCatalogPropertiesMetadata();
static final HiveSchemaPropertiesMetadata SCHEMA_PROPERTIES_METADATA =
new HiveSchemaPropertiesMetadata();
@@ -69,8 +69,8 @@ public Capability newCapability() {
protected Optional newProxyPlugin(Map config) {
boolean impersonationEnabled =
(boolean)
- new HiveCatalogPropertiesMeta()
- .getOrDefault(config, HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE);
+ new HiveCatalogPropertiesMetadata()
+ .getOrDefault(config, HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE);
if (!impersonationEnabled) {
return Optional.empty();
}
diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java
index bb7d06f6bc8..902fce3779c 100644
--- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java
+++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java
@@ -18,9 +18,9 @@
*/
package org.apache.gravitino.catalog.hive;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.LIST_ALL_TABLES;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.LIST_ALL_TABLES;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.PRINCIPAL;
import static org.apache.gravitino.catalog.hive.HiveTable.SUPPORT_TABLE_TYPES;
import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.COMMENT;
import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.TABLE_TYPE;
@@ -200,7 +200,7 @@ private void initKerberosIfNecessary(Map conf, Configuration had
(String)
propertiesMetadata
.catalogPropertiesMetadata()
- .getOrDefault(conf, HiveCatalogPropertiesMeta.KEY_TAB_URI);
+ .getOrDefault(conf, HiveCatalogPropertiesMetadata.KEY_TAB_URI);
Preconditions.checkArgument(StringUtils.isNotBlank(keytabUri), "Keytab uri can't be blank");
// TODO: Support to download the file from Kerberos HDFS
Preconditions.checkArgument(
@@ -210,7 +210,7 @@ private void initKerberosIfNecessary(Map conf, Configuration had
(int)
propertiesMetadata
.catalogPropertiesMetadata()
- .getOrDefault(conf, HiveCatalogPropertiesMeta.FETCH_TIMEOUT_SEC);
+ .getOrDefault(conf, HiveCatalogPropertiesMetadata.FETCH_TIMEOUT_SEC);
FetchFileUtils.fetchFileFromUri(
keytabUri, keytabPath.toFile(), fetchKeytabFileTimeout, hadoopConf);
@@ -244,7 +244,7 @@ private void initKerberosIfNecessary(Map conf, Configuration had
(int)
propertiesMetadata
.catalogPropertiesMetadata()
- .getOrDefault(conf, HiveCatalogPropertiesMeta.CHECK_INTERVAL_SEC);
+ .getOrDefault(conf, HiveCatalogPropertiesMetadata.CHECK_INTERVAL_SEC);
checkTgtExecutor.scheduleAtFixedRate(
() -> {
diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMetadata.java
similarity index 95%
rename from catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java
rename to catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMetadata.java
index dc532e6014d..8897d77051c 100644
--- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java
+++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMetadata.java
@@ -21,12 +21,11 @@
import com.google.common.collect.ImmutableMap;
import java.util.Map;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.connector.BaseCatalogPropertiesMetadata;
import org.apache.gravitino.connector.PropertyEntry;
import org.apache.gravitino.hive.ClientPropertiesMetadata;
-public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata {
+public class HiveCatalogPropertiesMetadata extends BaseCatalogPropertiesMetadata {
public static final String CLIENT_POOL_SIZE = HiveConstants.CLIENT_POOL_SIZE;
public static final String METASTORE_URIS = HiveConstants.METASTORE_URIS;
@@ -110,7 +109,6 @@ public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata {
DEFAULT_LIST_ALL_TABLES,
false /* hidden */,
false /* reserved */))
- .putAll(AuthorizationPropertiesMeta.RANGER_AUTHORIZATION_PROPERTY_ENTRIES)
.putAll(CLIENT_PROPERTIES_METADATA.propertyEntries())
.build();
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java
index 7b3f944b913..ddf76163185 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java
@@ -21,7 +21,7 @@
import static org.apache.gravitino.catalog.hive.HiveCatalog.CATALOG_PROPERTIES_METADATA;
import static org.apache.gravitino.catalog.hive.HiveCatalog.SCHEMA_PROPERTIES_METADATA;
import static org.apache.gravitino.catalog.hive.HiveCatalog.TABLE_PROPERTIES_METADATA;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
import com.google.common.collect.Maps;
import java.time.Instant;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java
index 9e355ed044b..2c87bfd5802 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java
@@ -23,15 +23,15 @@
import static org.apache.gravitino.Catalog.CLOUD_NAME;
import static org.apache.gravitino.Catalog.CLOUD_REGION_CODE;
import static org.apache.gravitino.Catalog.PROPERTY_IN_USE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CHECK_INTERVAL_SEC;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CLIENT_POOL_SIZE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.FETCH_TIMEOUT_SEC;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.KEY_TAB_URI;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.LIST_ALL_TABLES;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.CHECK_INTERVAL_SEC;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.CLIENT_POOL_SIZE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.FETCH_TIMEOUT_SEC;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.KEY_TAB_URI;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.LIST_ALL_TABLES;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.PRINCIPAL;
import static org.apache.gravitino.catalog.hive.TestHiveCatalog.HIVE_PROPERTIES_METADATA;
import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
import static org.mockito.ArgumentMatchers.any;
@@ -43,7 +43,6 @@
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.NameIdentifier;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.connector.BaseCatalog;
import org.apache.gravitino.connector.PropertyEntry;
import org.apache.gravitino.exceptions.ConnectionFailedException;
@@ -74,7 +73,7 @@ void testPropertyMeta() {
Map> propertyEntryMap =
HIVE_PROPERTIES_METADATA.catalogPropertiesMetadata().propertyEntries();
- Assertions.assertEquals(21, propertyEntryMap.size());
+ Assertions.assertEquals(16, propertyEntryMap.size());
Assertions.assertTrue(propertyEntryMap.containsKey(METASTORE_URIS));
Assertions.assertTrue(propertyEntryMap.containsKey(Catalog.PROPERTY_PACKAGE));
Assertions.assertTrue(propertyEntryMap.containsKey(BaseCatalog.CATALOG_OPERATION_IMPL));
@@ -83,17 +82,6 @@ void testPropertyMeta() {
Assertions.assertTrue(propertyEntryMap.containsKey(CLIENT_POOL_SIZE));
Assertions.assertTrue(propertyEntryMap.containsKey(IMPERSONATION_ENABLE));
Assertions.assertTrue(propertyEntryMap.containsKey(LIST_ALL_TABLES));
- Assertions.assertTrue(
- propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_ADMIN_URL));
- Assertions.assertTrue(
- propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_AUTH_TYPE));
- Assertions.assertTrue(
- propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_USERNAME));
- Assertions.assertTrue(
- propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_PASSWORD));
- Assertions.assertTrue(
- propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_SERVICE_NAME));
-
Assertions.assertTrue(propertyEntryMap.get(METASTORE_URIS).isRequired());
Assertions.assertFalse(propertyEntryMap.get(Catalog.PROPERTY_PACKAGE).isRequired());
Assertions.assertFalse(propertyEntryMap.get(CLIENT_POOL_SIZE).isRequired());
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java
index d3bfb1e3c69..337600a63a3 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java
@@ -18,7 +18,7 @@
*/
package org.apache.gravitino.catalog.hive;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
import com.google.common.collect.Maps;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java
index 2823bf27612..cd143b1e8ec 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java
@@ -18,7 +18,7 @@
*/
package org.apache.gravitino.catalog.hive;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.TABLE_TYPE;
import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
import static org.apache.gravitino.rel.expressions.transforms.Transforms.day;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java
index d9e6fe70dca..7d8079d1ede 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java
@@ -18,7 +18,7 @@
*/
package org.apache.gravitino.catalog.hive.integration.test;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.COMMENT;
import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.EXTERNAL;
import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.FORMAT;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
index c333cf35103..861bb44edfd 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
@@ -19,10 +19,10 @@
package org.apache.gravitino.catalog.hive.integration.test;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.KEY_TAB_URI;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.KEY_TAB_URI;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.PRINCIPAL;
import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
index b7d61582efb..3d71948b744 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
@@ -18,8 +18,8 @@
*/
package org.apache.gravitino.catalog.hive.integration.test;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
import static org.apache.gravitino.server.GravitinoServer.WEBSERVER_CONF_PREFIX;
import com.google.common.collect.ImmutableMap;
diff --git a/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java b/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java
deleted file mode 100644
index e1b389f7ca3..00000000000
--- a/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.gravitino.connector;
-
-import com.google.common.collect.ImmutableMap;
-import java.util.Map;
-
-public class AuthorizationPropertiesMeta {
- /** Ranger admin web URIs */
- public static final String RANGER_ADMIN_URL = "authorization.ranger.admin.url";
- /** Ranger authentication type kerberos or simple */
- public static final String RANGER_AUTH_TYPE = "authorization.ranger.auth.type";
- /**
- * Ranger admin web login username(auth_type=simple), or kerberos principal(auth_type=kerberos)
- */
- public static final String RANGER_USERNAME = "authorization.ranger.username";
- /**
- * Ranger admin web login user password(auth_type=simple), or path of the keytab
- * file(auth_type=kerberos)
- */
- public static final String RANGER_PASSWORD = "authorization.ranger.password";
- /** Ranger service name */
- public static final String RANGER_SERVICE_NAME = "authorization.ranger.service.name";
-
- public static final Map<String, PropertyEntry<?>> RANGER_AUTHORIZATION_PROPERTY_ENTRIES =
- ImmutableMap.<String, PropertyEntry<?>>builder()
- .put(
- RANGER_SERVICE_NAME,
- PropertyEntry.stringOptionalPropertyEntry(
- RANGER_SERVICE_NAME, "The Ranger service name", true, null, false))
- .put(
- RANGER_ADMIN_URL,
- PropertyEntry.stringOptionalPropertyEntry(
- RANGER_ADMIN_URL, "The Ranger admin web URIs", true, null, false))
- .put(
- RANGER_AUTH_TYPE,
- PropertyEntry.stringOptionalPropertyEntry(
- RANGER_AUTH_TYPE,
- "The Ranger admin web auth type (kerberos/simple)",
- true,
- "simple",
- false))
- .put(
- RANGER_USERNAME,
- PropertyEntry.stringOptionalPropertyEntry(
- RANGER_USERNAME, "The Ranger admin web login username", true, null, false))
- .put(
- RANGER_PASSWORD,
- PropertyEntry.stringOptionalPropertyEntry(
- RANGER_PASSWORD, "The Ranger admin web login password", true, null, false))
- .build();
-}
diff --git a/docs/security/authorization-pushdown.md b/docs/security/authorization-pushdown.md
index 43c1096bd4d..fe42a0955f4 100644
--- a/docs/security/authorization-pushdown.md
+++ b/docs/security/authorization-pushdown.md
@@ -24,6 +24,7 @@ In order to use the Ranger Hadoop SQL Plugin, you need to configure the followin
| `authorization.ranger.auth.type` | The Apache Ranger authentication type `simple` or `kerberos`. | `simple` | No | 0.6.0-incubating |
| `authorization.ranger.username` | The Apache Ranger admin web login username (auth type=simple), or kerberos principal(auth type=kerberos), Need have Ranger administrator permission. | (none) | No | 0.6.0-incubating |
| `authorization.ranger.password` | The Apache Ranger admin web login user password (auth type=simple), or path of the keytab file(auth type=kerberos) | (none) | No | 0.6.0-incubating |
+| `authorization.ranger.service.type` | The Apache Ranger service type, such as `HadoopSQL`. | (none) | No | 0.8.0-incubating |
| `authorization.ranger.service.name` | The Apache Ranger service name. | (none) | No | 0.6.0-incubating |
Once you have used the correct configuration, you can perform authorization operations by calling Gravitino [authorization RESTful API](https://gravitino.apache.org/docs/latest/api/rest/grant-roles-to-a-user).
@@ -46,6 +47,7 @@ authorization.ranger.admin.url=172.0.0.100:6080
authorization.ranger.auth.type=simple
authorization.ranger.username=Jack
authorization.ranger.password=PWD123
+authorization.ranger.service.type=HadoopSQL
authorization.ranger.service.name=hiveRepo
```