diff --git a/.github/workflows/access-control-integration-test.yml b/.github/workflows/access-control-integration-test.yml index 54ffde2ee82..6997eaf9a4c 100644 --- a/.github/workflows/access-control-integration-test.yml +++ b/.github/workflows/access-control-integration-test.yml @@ -90,6 +90,9 @@ jobs: ./gradlew -PtestMode=embedded -PjdbcBackend=h2 -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test ./gradlew -PtestMode=deploy -PjdbcBackend=mysql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test ./gradlew -PtestMode=deploy -PjdbcBackend=postgresql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test + ./gradlew -PtestMode=embedded -PjdbcBackend=h2 -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-jdbc:test + ./gradlew -PtestMode=deploy -PjdbcBackend=mysql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-jdbc:test + ./gradlew -PtestMode=deploy -PjdbcBackend=postgresql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-jdbc:test - name: Upload integrate tests reports uses: actions/upload-artifact@v3 diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml deleted file mode 100644 index 6ac8c758034..00000000000 --- a/.github/workflows/add-to-project.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Add issue to project - -on: - issues: - types: - - opened - -jobs: - add-to-project: - name: Add issue to project - runs-on: ubuntu-latest - steps: - - uses: actions/add-to-project@v0.5.0 - with: - project-url: https://github.com/orgs/datastrato/projects/1 - github-token: ${{ secrets.ADD_ISSUE_TO_PROJECT }} diff --git a/LICENSE.bin b/LICENSE.bin index 34723024e78..effaa4ac4a2 100644 --- a/LICENSE.bin +++ b/LICENSE.bin @@ -304,6 +304,7 @@ Apache Iceberg Aliyun 
Apache Iceberg api Apache Iceberg AWS + Apache Iceberg Azure Apache Iceberg core Apache Iceberg Hive metastore Apache Iceberg GCP diff --git a/api/src/main/java/org/apache/gravitino/credential/AzureAccountKeyCredential.java b/api/src/main/java/org/apache/gravitino/credential/AzureAccountKeyCredential.java new file mode 100644 index 00000000000..be24d7cda0e --- /dev/null +++ b/api/src/main/java/org/apache/gravitino/credential/AzureAccountKeyCredential.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.gravitino.credential; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import org.apache.commons.lang3.StringUtils; + +/** Azure account key credential. */ +public class AzureAccountKeyCredential implements Credential { + + /** Azure account key credential type. 
*/ + public static final String AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE = "azure-account-key"; + /** Azure storage account name */ + public static final String GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME = "azure-storage-account-name"; + /** Azure storage account key */ + public static final String GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY = "azure-storage-account-key"; + + private String accountName; + private String accountKey; + + /** + * Constructs an instance of {@link AzureAccountKeyCredential}. + * + * @param accountName The Azure account name. + * @param accountKey The Azure account key. + */ + public AzureAccountKeyCredential(String accountName, String accountKey) { + validate(accountName, accountKey); + this.accountName = accountName; + this.accountKey = accountKey; + } + + /** + * This is the constructor that is used by credential factory to create an instance of credential + * according to the credential information. + */ + public AzureAccountKeyCredential() {} + + @Override + public String credentialType() { + return AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE; + } + + @Override + public long expireTimeInMs() { + return 0; + } + + @Override + public Map credentialInfo() { + return (new ImmutableMap.Builder()) + .put(GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, accountName) + .put(GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, accountKey) + .build(); + } + + @Override + public void initialize(Map credentialInfo, long expireTimeInMS) { + String accountName = credentialInfo.get(GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME); + String accountKey = credentialInfo.get(GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY); + validate(accountName, accountKey); + this.accountName = accountName; + this.accountKey = accountKey; + } + + /** + * Get Azure account name + * + * @return The Azure account name + */ + public String accountName() { + return accountName; + } + + /** + * Get Azure account key + * + * @return The Azure account key + */ + public String accountKey() { + return accountKey; + } + + private void validate(String 
accountName, String accountKey) { + Preconditions.checkArgument( + StringUtils.isNotBlank(accountName), "Azure account name should not be empty."); + Preconditions.checkArgument( + StringUtils.isNotBlank(accountKey), "Azure account key should not be empty."); + } +} diff --git a/api/src/main/resources/META-INF/services/org.apache.gravitino.credential.Credential b/api/src/main/resources/META-INF/services/org.apache.gravitino.credential.Credential index f130b4b6423..6071cb916ae 100644 --- a/api/src/main/resources/META-INF/services/org.apache.gravitino.credential.Credential +++ b/api/src/main/resources/META-INF/services/org.apache.gravitino.credential.Credential @@ -23,3 +23,4 @@ org.apache.gravitino.credential.GCSTokenCredential org.apache.gravitino.credential.OSSTokenCredential org.apache.gravitino.credential.OSSSecretKeyCredential org.apache.gravitino.credential.ADLSTokenCredential +org.apache.gravitino.credential.AzureAccountKeyCredential diff --git a/authorizations/authorization-jdbc/build.gradle.kts b/authorizations/authorization-jdbc/build.gradle.kts new file mode 100644 index 00000000000..8b105908c26 --- /dev/null +++ b/authorizations/authorization-jdbc/build.gradle.kts @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +description = "authorization-jdbc" + +plugins { + `maven-publish` + id("java") + id("idea") +} + +dependencies { + implementation(project(":api")) { + exclude(group = "*") + } + implementation(project(":core")) { + exclude(group = "*") + } + + implementation(libs.bundles.log4j) + implementation(libs.commons.lang3) + implementation(libs.guava) + implementation(libs.javax.jaxb.api) { + exclude("*") + } + implementation(libs.javax.ws.rs.api) + implementation(libs.jettison) + compileOnly(libs.lombok) + implementation(libs.mail) + implementation(libs.rome) + implementation(libs.commons.dbcp2) + + testImplementation(project(":common")) + testImplementation(project(":clients:client-java")) + testImplementation(project(":server")) + testImplementation(project(":catalogs:catalog-common")) + testImplementation(project(":integration-test-common", "testArtifacts")) + testImplementation(libs.junit.jupiter.api) + testImplementation(libs.mockito.core) + testImplementation(libs.testcontainers) + testRuntimeOnly(libs.junit.jupiter.engine) +} + +tasks { + val runtimeJars by registering(Copy::class) { + from(configurations.runtimeClasspath) + into("build/libs") + } + + val copyAuthorizationLibs by registering(Copy::class) { + dependsOn("jar", runtimeJars) + from("build/libs") { + exclude("guava-*.jar") + exclude("log4j-*.jar") + exclude("slf4j-*.jar") + } + into("$rootDir/distribution/package/authorizations/ranger/libs") + } + + register("copyLibAndConfig", Copy::class) { + dependsOn(copyAuthorizationLibs) + } + + jar { + dependsOn(runtimeJars) + } +} + +tasks.test { + dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars") + + val skipITs = project.hasProperty("skipITs") + if (skipITs) { + // Exclude integration tests + exclude("**/integration/test/**") + } else { + dependsOn(tasks.jar) + } +} diff --git 
a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationPlugin.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationPlugin.java new file mode 100644 index 00000000000..f889cee2240 --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationPlugin.java @@ -0,0 +1,461 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.jdbc; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.dbcp2.BasicDataSource; +import org.apache.commons.pool2.impl.BaseObjectPoolConfig; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.annotation.Unstable; +import org.apache.gravitino.authorization.AuthorizationPrivilege; +import org.apache.gravitino.authorization.AuthorizationSecurableObject; +import org.apache.gravitino.authorization.Group; +import org.apache.gravitino.authorization.MetadataObjectChange; +import org.apache.gravitino.authorization.Owner; +import org.apache.gravitino.authorization.Role; +import org.apache.gravitino.authorization.RoleChange; +import org.apache.gravitino.authorization.SecurableObject; +import org.apache.gravitino.authorization.User; +import org.apache.gravitino.connector.authorization.AuthorizationPlugin; +import org.apache.gravitino.exceptions.AuthorizationPluginException; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.GroupEntity; +import org.apache.gravitino.meta.UserEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * JdbcSQLBasedAuthorizationPlugin is the base class for all JDBC-based authorization plugins. For + * example, JdbcHiveAuthorizationPlugin is the JDBC-based authorization plugin for Hive. Different + * JDBC-based authorization plugins can inherit this class and implement their own SQL statements. 
+ */ +@Unstable +abstract class JdbcAuthorizationPlugin implements AuthorizationPlugin, JdbcAuthorizationSQL { + + private static final String GROUP_PREFIX = "GRAVITINO_GROUP_"; + private static final Logger LOG = LoggerFactory.getLogger(JdbcAuthorizationPlugin.class); + + protected BasicDataSource dataSource; + protected JdbcSecurableObjectMappingProvider mappingProvider; + + public JdbcAuthorizationPlugin(Map config) { + // Initialize the data source + dataSource = new BasicDataSource(); + JdbcAuthorizationProperties.validate(config); + + String jdbcUrl = config.get(JdbcAuthorizationProperties.JDBC_URL); + dataSource.setUrl(jdbcUrl); + dataSource.setDriverClassName(config.get(JdbcAuthorizationProperties.JDBC_DRIVER)); + dataSource.setUsername(config.get(JdbcAuthorizationProperties.JDBC_USERNAME)); + dataSource.setPassword(config.get(JdbcAuthorizationProperties.JDBC_PASSWORD)); + dataSource.setDefaultAutoCommit(true); + dataSource.setMaxTotal(20); + dataSource.setMaxIdle(5); + dataSource.setMinIdle(0); + dataSource.setLogAbandoned(true); + dataSource.setRemoveAbandonedOnBorrow(true); + dataSource.setTestOnBorrow(BaseObjectPoolConfig.DEFAULT_TEST_ON_BORROW); + dataSource.setTestWhileIdle(BaseObjectPoolConfig.DEFAULT_TEST_WHILE_IDLE); + dataSource.setNumTestsPerEvictionRun(BaseObjectPoolConfig.DEFAULT_NUM_TESTS_PER_EVICTION_RUN); + dataSource.setTestOnReturn(BaseObjectPoolConfig.DEFAULT_TEST_ON_RETURN); + dataSource.setLifo(BaseObjectPoolConfig.DEFAULT_LIFO); + mappingProvider = new JdbcSecurableObjectMappingProvider(); + } + + @Override + public void close() throws IOException { + if (dataSource != null) { + try { + dataSource.close(); + dataSource = null; + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public Boolean onMetadataUpdated(MetadataObjectChange... changes) throws RuntimeException { + // This interface mainly handles the metadata object rename change and delete change. 
+ // The privilege for JdbcSQLBasedAuthorizationPlugin will be renamed or deleted automatically. + // We don't need to do any other things. + return true; + } + + @Override + public Boolean onRoleCreated(Role role) throws AuthorizationPluginException { + List sqls = getCreateRoleSQL(role.name()); + for (String sql : sqls) { + executeUpdateSQL(sql, "already exists"); + } + + if (role.securableObjects() != null) { + for (SecurableObject object : role.securableObjects()) { + onRoleUpdated(role, RoleChange.addSecurableObject(role.name(), object)); + } + } + + return true; + } + + @Override + public Boolean onRoleAcquired(Role role) throws AuthorizationPluginException { + throw new UnsupportedOperationException("Doesn't support to acquired a role"); + } + + @Override + public Boolean onRoleDeleted(Role role) throws AuthorizationPluginException { + List sqls = getDropRoleSQL(role.name()); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + return true; + } + + @Override + public Boolean onRoleUpdated(Role role, RoleChange...
changes) + throws AuthorizationPluginException { + onRoleCreated(role); + for (RoleChange change : changes) { + if (change instanceof RoleChange.AddSecurableObject) { + SecurableObject object = ((RoleChange.AddSecurableObject) change).getSecurableObject(); + grantObjectPrivileges(role, object); + } else if (change instanceof RoleChange.RemoveSecurableObject) { + SecurableObject object = ((RoleChange.RemoveSecurableObject) change).getSecurableObject(); + revokeObjectPrivileges(role, object); + } else if (change instanceof RoleChange.UpdateSecurableObject) { + RoleChange.UpdateSecurableObject updateChange = (RoleChange.UpdateSecurableObject) change; + SecurableObject addObject = updateChange.getNewSecurableObject(); + SecurableObject removeObject = updateChange.getSecurableObject(); + revokeObjectPrivileges(role, removeObject); + grantObjectPrivileges(role, addObject); + } else { + throw new IllegalArgumentException( + String.format("RoleChange is not supported - %s", change)); + } + } + return true; + } + + @Override + public Boolean onGrantedRolesToUser(List roles, User user) + throws AuthorizationPluginException { + + for (Role role : roles) { + onRoleCreated(role); + List sqls = getGrantRoleSQL(role.name(), "USER", user.name()); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + } + return true; + } + + @Override + public Boolean onRevokedRolesFromUser(List roles, User user) + throws AuthorizationPluginException { + + for (Role role : roles) { + onRoleCreated(role); + List sqls = getRevokeRoleSQL(role.name(), "USER", user.name()); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + } + return true; + } + + @Override + public Boolean onGrantedRolesToGroup(List roles, Group group) + throws AuthorizationPluginException { + + for (Role role : roles) { + onRoleCreated(role); + List sqls = + getGrantRoleSQL(role.name(), "USER", String.format("%s%s", GROUP_PREFIX, group.name())); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + } + return true; 
+ } + + @Override + public Boolean onRevokedRolesFromGroup(List roles, Group group) + throws AuthorizationPluginException { + + for (Role role : roles) { + onRoleCreated(role); + List sqls = + getRevokeRoleSQL(role.name(), "USER", String.format("%s%s", GROUP_PREFIX, group.name())); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + } + return true; + } + + @Override + public Boolean onUserAdded(User user) throws AuthorizationPluginException { + List sqls = getCreateUserSQL(user.name()); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + return true; + } + + @Override + public Boolean onUserRemoved(User user) throws AuthorizationPluginException { + List sqls = getDropUserSQL(user.name()); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + return true; + } + + @Override + public Boolean onUserAcquired(User user) throws AuthorizationPluginException { + throw new UnsupportedOperationException("Doesn't support to acquired a user"); + } + + @Override + public Boolean onGroupAdded(Group group) throws AuthorizationPluginException { + String name = String.format("%s%s", GROUP_PREFIX, group.name()); + List sqls = getCreateUserSQL(name); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + return true; + } + + @Override + public Boolean onGroupRemoved(Group group) throws AuthorizationPluginException { + String name = String.format("%s%s", GROUP_PREFIX, group.name()); + List sqls = getDropUserSQL(name); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + return true; + } + + @Override + public Boolean onGroupAcquired(Group group) throws AuthorizationPluginException { + throw new UnsupportedOperationException("Doesn't support to acquired a group"); + } + + @Override + public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, Owner newOwner) + throws AuthorizationPluginException { + if (newOwner.type() == Owner.Type.USER) { + onUserAdded( + UserEntity.builder() + .withName(newOwner.name()) + .withId(0L) + 
.withAuditInfo(AuditInfo.EMPTY) + .build()); + } else if (newOwner.type() == Owner.Type.GROUP) { + onGroupAdded( + GroupEntity.builder() + .withName(newOwner.name()) + .withId(0L) + .withAuditInfo(AuditInfo.EMPTY) + .build()); + } else { + throw new IllegalArgumentException( + String.format("Don't support owner type %s", newOwner.type())); + } + + List authObjects = mappingProvider.translateOwner(metadataObject); + for (AuthorizationSecurableObject authObject : authObjects) { + List sqls = + getSetOwnerSQL( + authObject.type().metadataObjectType(), authObject.fullName(), preOwner, newOwner); + for (String sql : sqls) { + executeUpdateSQL(sql); + } + } + return true; + } + + @Override + public List getCreateUserSQL(String username) { + return Lists.newArrayList(String.format("CREATE USER %s", username)); + } + + @Override + public List getDropUserSQL(String username) { + return Lists.newArrayList(String.format("DROP USER %s", username)); + } + + @Override + public List getCreateRoleSQL(String roleName) { + return Lists.newArrayList(String.format("CREATE ROLE %s", roleName)); + } + + @Override + public List getDropRoleSQL(String roleName) { + return Lists.newArrayList(String.format("DROP ROLE %s", roleName)); + } + + @Override + public List getGrantPrivilegeSQL( + String privilege, String objectType, String objectName, String roleName) { + return Lists.newArrayList( + String.format("GRANT %s ON %s %s TO ROLE %s", privilege, objectType, objectName, roleName)); + } + + @Override + public List getRevokePrivilegeSQL( + String privilege, String objectType, String objectName, String roleName) { + return Lists.newArrayList( + String.format( + "REVOKE %s ON %s %s FROM ROLE %s", privilege, objectType, objectName, roleName)); + } + + @Override + public List getGrantRoleSQL(String roleName, String grantorType, String grantorName) { + return Lists.newArrayList( + String.format("GRANT ROLE %s TO %s %s", roleName, grantorType, grantorName)); + } + + @Override + public List 
getRevokeRoleSQL(String roleName, String revokerType, String revokerName) { + return Lists.newArrayList( + String.format("REVOKE ROLE %s FROM %s %s", roleName, revokerType, revokerName)); + } + + @VisibleForTesting + Connection getConnection() throws SQLException { + return dataSource.getConnection(); + } + + protected void executeUpdateSQL(String sql) { + executeUpdateSQL(sql, null); + } + + /** + * Convert the object name contains `*` to a list of AuthorizationSecurableObject. + * + * @param object The object contains the name with `*` to be converted + * @return The list of AuthorizationSecurableObject + */ + protected List convertResourceAll( + AuthorizationSecurableObject object) { + List authObjects = Lists.newArrayList(); + authObjects.add(object); + return authObjects; + } + + protected List filterUnsupportedPrivileges( + List privileges) { + return privileges; + } + + protected AuthorizationPluginException toAuthorizationPluginException(SQLException se) { + return new AuthorizationPluginException( + "JDBC authorization plugin fail to execute SQL, error code: %d", se.getErrorCode()); + } + + void executeUpdateSQL(String sql, String ignoreErrorMsg) { + try (final Connection connection = getConnection()) { + try (final Statement statement = connection.createStatement()) { + statement.executeUpdate(sql); + } + } catch (SQLException se) { + if (ignoreErrorMsg != null && se.getMessage().contains(ignoreErrorMsg)) { + return; + } + LOG.error("JDBC authorization plugin exception: ", se); + throw toAuthorizationPluginException(se); + } + } + + private void grantObjectPrivileges(Role role, SecurableObject object) { + List authObjects = mappingProvider.translatePrivilege(object); + for (AuthorizationSecurableObject authObject : authObjects) { + List convertedObjects = Lists.newArrayList(); + if (authObject.name().equals(JdbcSecurableObject.ALL)) { + convertedObjects.addAll(convertResourceAll(authObject)); + } else { + convertedObjects.add(authObject); + } + + for 
(AuthorizationSecurableObject convertedObject : convertedObjects) { + List privileges = + filterUnsupportedPrivileges(authObject.privileges()).stream() + .map(AuthorizationPrivilege::getName) + .collect(Collectors.toList()); + // We don't grant the privileges in one SQL, because some privilege has been granted, it + // will cause the failure of the SQL. So we grant the privileges one by one. + for (String privilege : privileges) { + List sqls = + getGrantPrivilegeSQL( + privilege, + convertedObject.metadataObjectType().name(), + convertedObject.fullName(), + role.name()); + for (String sql : sqls) { + executeUpdateSQL(sql, "is already granted"); + } + } + } + } + } + + private void revokeObjectPrivileges(Role role, SecurableObject removeObject) { + List authObjects = + mappingProvider.translatePrivilege(removeObject); + for (AuthorizationSecurableObject authObject : authObjects) { + List convertedObjects = Lists.newArrayList(); + if (authObject.name().equals(JdbcSecurableObject.ALL)) { + convertedObjects.addAll(convertResourceAll(authObject)); + } else { + convertedObjects.add(authObject); + } + + for (AuthorizationSecurableObject convertedObject : convertedObjects) { + List privileges = + filterUnsupportedPrivileges(authObject.privileges()).stream() + .map(AuthorizationPrivilege::getName) + .collect(Collectors.toList()); + for (String privilege : privileges) { + // We don't revoke the privileges in one SQL, because some privilege has been revoked, it + // will cause the failure of the SQL. So we revoke the privileges one by one. 
+ List sqls = + getRevokePrivilegeSQL( + privilege, + convertedObject.metadataObjectType().name(), + convertedObject.fullName(), + role.name()); + for (String sql : sqls) { + executeUpdateSQL(sql, "Cannot find privilege Privilege"); + } + } + } + } + } +} diff --git a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationProperties.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationProperties.java new file mode 100644 index 00000000000..b13504fd2fd --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationProperties.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.jdbc; + +import java.util.Map; + +/** The properties for JDBC authorization plugin. 
*/ +public class JdbcAuthorizationProperties { + private static final String CONFIG_PREFIX = "authorization.jdbc."; + public static final String JDBC_PASSWORD = CONFIG_PREFIX + "password"; + public static final String JDBC_USERNAME = CONFIG_PREFIX + "username"; + public static final String JDBC_URL = CONFIG_PREFIX + "url"; + public static final String JDBC_DRIVER = CONFIG_PREFIX + "driver"; + + public static void validate(Map properties) { + String errorMsg = "%s is required"; + check(properties, JDBC_URL, errorMsg); + check(properties, JDBC_USERNAME, errorMsg); + check(properties, JDBC_PASSWORD, errorMsg); + check(properties, JDBC_DRIVER, errorMsg); + } + + private static void check(Map properties, String key, String errorMsg) { + if (!properties.containsKey(key) || properties.get(key) == null) { + throw new IllegalArgumentException(String.format(errorMsg, key)); + } + } +} diff --git a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationSQL.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationSQL.java new file mode 100644 index 00000000000..f7171ff354a --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationSQL.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.jdbc; + +import java.util.List; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.annotation.Unstable; +import org.apache.gravitino.authorization.Owner; + +/** Interface for SQL operations of the underlying access control system. */ +@Unstable +interface JdbcAuthorizationSQL { + + /** + * Get SQL statements for creating a user. + * + * @param username the username to create + * @return the SQL statement list to create a user + */ + List getCreateUserSQL(String username); + + /** + * Get SQL statements for dropping a user. + * + * @param username the username to drop + * @return the SQL statement list to drop a user + */ + List getDropUserSQL(String username); + + /** + * Get SQL statements for creating a role. + * + * @param roleName the role name to create + * @return the SQL statement list to create a role + */ + List getCreateRoleSQL(String roleName); + + /** + * Get SQL statements for dropping a role. + * + * @param roleName the role name to drop + * @return the SQL statement list to drop a role + */ + List getDropRoleSQL(String roleName); + + /** + * Get SQL statements for granting privileges.
+ * + * @param privilege the privilege to grant + * @param objectType the object type in the database system + * @param objectName the object name in the database system + * @param roleName the role name to grant + * @return the sql statement list to grant privilege + */ + List getGrantPrivilegeSQL( + String privilege, String objectType, String objectName, String roleName); + + /** + * Get SQL statements for revoking privileges. + * + * @param privilege the privilege to revoke + * @param objectType the object type in the database system + * @param objectName the object name in the database system + * @param roleName the role name to revoke + * @return the sql statement list to revoke privilege + */ + List getRevokePrivilegeSQL( + String privilege, String objectType, String objectName, String roleName); + + /** + * Get SQL statements for granting role. + * + * @param roleName the role name to grant + * @param grantorType the grantor type, usually USER or ROLE + * @param grantorName the grantor name + * @return the sql statement list to grant role + */ + List getGrantRoleSQL(String roleName, String grantorType, String grantorName); + + /** + * Get SQL statements for revoking roles. + * + * @param roleName the role name to revoke + * @param revokerType the revoker type, usually USER or ROLE + * @param revokerName the revoker name + * @return the sql statement list to revoke role + */ + List getRevokeRoleSQL(String roleName, String revokerType, String revokerName); + + /** + * Get SQL statements for setting owner. 
+ * + * @param type The metadata object type + * @param objectName the object name in the database system + * @param preOwner the previous owner of the object + * @param newOwner the new owner of the object + * @return the sql statement list to set owner + */ + List getSetOwnerSQL( + MetadataObject.Type type, String objectName, Owner preOwner, Owner newOwner); +} diff --git a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcMetadataObject.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcMetadataObject.java new file mode 100644 index 00000000000..c74c7ae6093 --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcMetadataObject.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.jdbc; + +import com.google.common.base.Preconditions; +import java.util.List; +import javax.annotation.Nullable; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.authorization.AuthorizationMetadataObject; + +public class JdbcMetadataObject implements AuthorizationMetadataObject { + + private final String parent; + private final String name; + private final Type type; + + public JdbcMetadataObject(String parent, String name, Type type) { + this.parent = parent; + this.name = name; + this.type = type; + } + + @Nullable + @Override + public String parent() { + return parent; + } + + @Override + public String name() { + return name; + } + + @Override + public List names() { + return DOT_SPLITTER.splitToList(fullName()); + } + + @Override + public Type type() { + return type; + } + + @Override + public void validateAuthorizationMetadataObject() throws IllegalArgumentException { + List names = names(); + Preconditions.checkArgument( + names != null && !names.isEmpty(), "The name of the object is empty."); + Preconditions.checkArgument( + names.size() <= 2, "The name of the object is not in the format of 'database.table'."); + Preconditions.checkArgument(type != null, "The type of the object is null."); + if (names.size() == 1) { + Preconditions.checkArgument( + type.metadataObjectType() == MetadataObject.Type.SCHEMA, + "The type of the object is not SCHEMA."); + } else { + Preconditions.checkArgument( + type.metadataObjectType() == MetadataObject.Type.TABLE, + "The type of the object is not TABLE."); + } + + for (String name : names) { + Preconditions.checkArgument(name != null, "Cannot create a metadata object with null name"); + } + } + + public enum Type implements AuthorizationMetadataObject.Type { + SCHEMA(MetadataObject.Type.SCHEMA), + TABLE(MetadataObject.Type.TABLE); + + private final MetadataObject.Type metadataType; + + Type(MetadataObject.Type type) { + this.metadataType = type; + } + + 
public MetadataObject.Type metadataObjectType() { + return metadataType; + } + + public static Type fromMetadataType(MetadataObject.Type metadataType) { + for (Type type : Type.values()) { + if (type.metadataObjectType() == metadataType) { + return type; + } + } + throw new IllegalArgumentException("No matching JdbcMetadataObject.Type for " + metadataType); + } + } +} diff --git a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcPrivilege.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcPrivilege.java new file mode 100644 index 00000000000..845b31a5b59 --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcPrivilege.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.jdbc; + +import org.apache.gravitino.authorization.AuthorizationPrivilege; +import org.apache.gravitino.authorization.Privilege; + +public enum JdbcPrivilege implements AuthorizationPrivilege { + SELECT("SELECT"), + INSERT("INSERT"), + UPDATE("UPDATE"), + ALTER("ALTER"), + DELETE("DELETE"), + ALL("ALL PRIVILEGES"), + CREATE("CREATE"), + DROP("DROP"), + USAGE("USAGE"); + + private final String name; + + JdbcPrivilege(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public Privilege.Condition condition() { + return Privilege.Condition.ALLOW; + } + + @Override + public boolean equalsTo(String value) { + return name.equals(value); + } +} diff --git a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcSecurableObject.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcSecurableObject.java new file mode 100644 index 00000000000..78b82e2a8da --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcSecurableObject.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.jdbc; + +import java.util.List; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.authorization.AuthorizationPrivilege; +import org.apache.gravitino.authorization.AuthorizationSecurableObject; + +/** + * JdbcAuthorizationObject is used for translating securable object to authorization securable + * object. JdbcAuthorizationObject has the database and table name. When table name is null, the + * object represents a database. The database can't be null. + */ +public class JdbcSecurableObject extends JdbcMetadataObject + implements AuthorizationSecurableObject { + + public static final String ALL = "*"; + + List privileges; + + private JdbcSecurableObject( + String parent, + String name, + JdbcMetadataObject.Type type, + List privileges) { + super(parent, name, type); + this.privileges = privileges; + } + + static JdbcSecurableObject create( + String schema, String table, List privileges) { + String parent = table == null ? null : schema; + String name = table == null ? schema : table; + JdbcMetadataObject.Type type = + table == null + ? 
JdbcMetadataObject.Type.fromMetadataType(MetadataObject.Type.SCHEMA) + : JdbcMetadataObject.Type.fromMetadataType(MetadataObject.Type.TABLE); + + JdbcSecurableObject object = new JdbcSecurableObject(parent, name, type, privileges); + object.validateAuthorizationMetadataObject(); + return object; + } + + @Override + public List privileges() { + return privileges; + } +} diff --git a/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcSecurableObjectMappingProvider.java b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcSecurableObjectMappingProvider.java new file mode 100644 index 00000000000..70b2d10e39c --- /dev/null +++ b/authorizations/authorization-jdbc/src/main/java/org/apache/gravitino/authorization/jdbc/JdbcSecurableObjectMappingProvider.java @@ -0,0 +1,212 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.jdbc; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.MetadataObjects; +import org.apache.gravitino.authorization.AuthorizationMetadataObject; +import org.apache.gravitino.authorization.AuthorizationPrivilege; +import org.apache.gravitino.authorization.AuthorizationPrivilegesMappingProvider; +import org.apache.gravitino.authorization.AuthorizationSecurableObject; +import org.apache.gravitino.authorization.Privilege; +import org.apache.gravitino.authorization.SecurableObject; + +/** + * JdbcSecurableObjectMappingProvider is used for translating securable object to authorization + * securable object. + */ +public class JdbcSecurableObjectMappingProvider implements AuthorizationPrivilegesMappingProvider { + + private final Map> privilegeMapping = + ImmutableMap.of( + Privilege.Name.CREATE_TABLE, Sets.newHashSet(JdbcPrivilege.CREATE), + Privilege.Name.CREATE_SCHEMA, Sets.newHashSet(JdbcPrivilege.CREATE), + Privilege.Name.SELECT_TABLE, Sets.newHashSet(JdbcPrivilege.SELECT), + Privilege.Name.MODIFY_TABLE, + Sets.newHashSet( + JdbcPrivilege.SELECT, + JdbcPrivilege.UPDATE, + JdbcPrivilege.DELETE, + JdbcPrivilege.INSERT, + JdbcPrivilege.ALTER), + Privilege.Name.USE_SCHEMA, Sets.newHashSet(JdbcPrivilege.USAGE)); + + private final Map privilegeScopeMapping = + ImmutableMap.of( + Privilege.Name.CREATE_TABLE, MetadataObject.Type.TABLE, + Privilege.Name.CREATE_SCHEMA, MetadataObject.Type.SCHEMA, + Privilege.Name.SELECT_TABLE, MetadataObject.Type.TABLE, + Privilege.Name.MODIFY_TABLE, MetadataObject.Type.TABLE, + Privilege.Name.USE_SCHEMA, MetadataObject.Type.SCHEMA); + + private final Set ownerPrivileges = ImmutableSet.of(); + + private final Set 
allowObjectTypes = + ImmutableSet.of( + MetadataObject.Type.METALAKE, + MetadataObject.Type.CATALOG, + MetadataObject.Type.SCHEMA, + MetadataObject.Type.TABLE); + + @Override + public Map> privilegesMappingRule() { + return privilegeMapping; + } + + @Override + public Set ownerMappingRule() { + return ownerPrivileges; + } + + @Override + public Set allowPrivilegesRule() { + return privilegeMapping.keySet(); + } + + @Override + public Set allowMetadataObjectTypesRule() { + return allowObjectTypes; + } + + @Override + public List translatePrivilege(SecurableObject securableObject) { + List authObjects = Lists.newArrayList(); + List databasePrivileges = Lists.newArrayList(); + List tablePrivileges = Lists.newArrayList(); + JdbcSecurableObject databaseObject; + JdbcSecurableObject tableObject; + switch (securableObject.type()) { + case METALAKE: + case CATALOG: + convertJdbcPrivileges(securableObject, databasePrivileges, tablePrivileges); + + if (!databasePrivileges.isEmpty()) { + databaseObject = + JdbcSecurableObject.create(JdbcSecurableObject.ALL, null, databasePrivileges); + authObjects.add(databaseObject); + } + + if (!tablePrivileges.isEmpty()) { + tableObject = + JdbcSecurableObject.create( + JdbcSecurableObject.ALL, JdbcSecurableObject.ALL, tablePrivileges); + authObjects.add(tableObject); + } + break; + + case SCHEMA: + convertJdbcPrivileges(securableObject, databasePrivileges, tablePrivileges); + if (!databasePrivileges.isEmpty()) { + databaseObject = + JdbcSecurableObject.create(securableObject.name(), null, databasePrivileges); + authObjects.add(databaseObject); + } + + if (!tablePrivileges.isEmpty()) { + tableObject = + JdbcSecurableObject.create( + securableObject.name(), JdbcSecurableObject.ALL, tablePrivileges); + authObjects.add(tableObject); + } + break; + + case TABLE: + convertJdbcPrivileges(securableObject, databasePrivileges, tablePrivileges); + if (!tablePrivileges.isEmpty()) { + MetadataObject metadataObject = + 
MetadataObjects.parse(securableObject.parent(), MetadataObject.Type.SCHEMA); + tableObject = + JdbcSecurableObject.create( + metadataObject.name(), securableObject.name(), tablePrivileges); + authObjects.add(tableObject); + } + break; + + default: + throw new IllegalArgumentException( + String.format("Don't support metadata object type %s", securableObject.type())); + } + + return authObjects; + } + + @Override + public List translateOwner(MetadataObject metadataObject) { + List objects = Lists.newArrayList(); + switch (metadataObject.type()) { + case METALAKE: + case CATALOG: + objects.add( + JdbcSecurableObject.create( + JdbcSecurableObject.ALL, null, Lists.newArrayList(JdbcPrivilege.ALL))); + objects.add( + JdbcSecurableObject.create( + JdbcSecurableObject.ALL, + JdbcSecurableObject.ALL, + Lists.newArrayList(JdbcPrivilege.ALL))); + break; + case SCHEMA: + objects.add( + JdbcSecurableObject.create( + metadataObject.name(), null, Lists.newArrayList(JdbcPrivilege.ALL))); + objects.add( + JdbcSecurableObject.create( + metadataObject.name(), + JdbcSecurableObject.ALL, + Lists.newArrayList(JdbcPrivilege.ALL))); + break; + case TABLE: + MetadataObject schema = + MetadataObjects.parse(metadataObject.parent(), MetadataObject.Type.SCHEMA); + objects.add( + JdbcSecurableObject.create( + schema.name(), metadataObject.name(), Lists.newArrayList(JdbcPrivilege.ALL))); + break; + default: + throw new IllegalArgumentException( + "Don't support metadata object type " + metadataObject.type()); + } + return objects; + } + + @Override + public AuthorizationMetadataObject translateMetadataObject(MetadataObject metadataObject) { + throw new UnsupportedOperationException("Not supported"); + } + + private void convertJdbcPrivileges( + SecurableObject securableObject, + List databasePrivileges, + List tablePrivileges) { + for (Privilege privilege : securableObject.privileges()) { + if (privilegeScopeMapping.get(privilege.name()) == MetadataObject.Type.SCHEMA) { + 
databasePrivileges.addAll(privilegeMapping.get(privilege.name())); + } else if (privilegeScopeMapping.get(privilege.name()) == MetadataObject.Type.TABLE) { + tablePrivileges.addAll(privilegeMapping.get(privilege.name())); + } + } + } +} diff --git a/authorizations/authorization-jdbc/src/test/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationPluginTest.java b/authorizations/authorization-jdbc/src/test/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationPluginTest.java new file mode 100644 index 00000000000..b72392a6cd8 --- /dev/null +++ b/authorizations/authorization-jdbc/src/test/java/org/apache/gravitino/authorization/jdbc/JdbcAuthorizationPluginTest.java @@ -0,0 +1,317 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.jdbc; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.MetadataObjects; +import org.apache.gravitino.authorization.Group; +import org.apache.gravitino.authorization.Owner; +import org.apache.gravitino.authorization.Privileges; +import org.apache.gravitino.authorization.Role; +import org.apache.gravitino.authorization.RoleChange; +import org.apache.gravitino.authorization.SecurableObject; +import org.apache.gravitino.authorization.SecurableObjects; +import org.apache.gravitino.authorization.User; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.GroupEntity; +import org.apache.gravitino.meta.RoleEntity; +import org.apache.gravitino.meta.UserEntity; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class JdbcAuthorizationPluginTest { + private static List expectSQLs = Lists.newArrayList(); + private static List expectTypes = Lists.newArrayList(); + private static List expectObjectNames = Lists.newArrayList(); + private static List> expectPreOwners = Lists.newArrayList(); + private static List expectNewOwners = Lists.newArrayList(); + private static int currentSQLIndex = 0; + private static int currentIndex = 0; + private static final Map properties = + ImmutableMap.of( + JdbcAuthorizationProperties.JDBC_URL, + "xx", + JdbcAuthorizationProperties.JDBC_USERNAME, + "xx", + JdbcAuthorizationProperties.JDBC_PASSWORD, + "xx", + JdbcAuthorizationProperties.JDBC_DRIVER, + "xx"); + + private static final JdbcAuthorizationPlugin plugin = + new JdbcAuthorizationPlugin(properties) { + + @Override + public List getSetOwnerSQL( + MetadataObject.Type type, String objectName, Owner preOwner, Owner newOwner) { + 
Assertions.assertEquals(expectTypes.get(currentIndex), type); + Assertions.assertEquals(expectObjectNames.get(currentIndex), objectName); + Assertions.assertEquals(expectPreOwners.get(currentIndex), Optional.ofNullable(preOwner)); + Assertions.assertEquals(expectNewOwners.get(currentIndex), newOwner); + currentIndex++; + return Collections.emptyList(); + } + + void executeUpdateSQL(String sql, String ignoreErrorMsg) { + Assertions.assertEquals(expectSQLs.get(currentSQLIndex), sql); + currentSQLIndex++; + } + }; + + @Test + public void testUserManagement() { + expectSQLs = Lists.newArrayList("CREATE USER tmp"); + currentSQLIndex = 0; + plugin.onUserAdded(createUser("tmp")); + + Assertions.assertThrows( + UnsupportedOperationException.class, () -> plugin.onUserAcquired(createUser("tmp"))); + + expectSQLs = Lists.newArrayList("DROP USER tmp"); + currentSQLIndex = 0; + plugin.onUserRemoved(createUser("tmp")); + } + + @Test + public void testGroupManagement() { + expectSQLs = Lists.newArrayList("CREATE USER GRAVITINO_GROUP_tmp"); + resetSQLIndex(); + plugin.onGroupAdded(createGroup("tmp")); + + Assertions.assertThrows( + UnsupportedOperationException.class, () -> plugin.onGroupAcquired(createGroup("tmp"))); + + expectSQLs = Lists.newArrayList("DROP USER GRAVITINO_GROUP_tmp"); + resetSQLIndex(); + plugin.onGroupRemoved(createGroup("tmp")); + } + + @Test + public void testRoleManagement() { + expectSQLs = Lists.newArrayList("CREATE ROLE tmp"); + resetSQLIndex(); + Role role = createRole("tmp"); + plugin.onRoleCreated(role); + + Assertions.assertThrows(UnsupportedOperationException.class, () -> plugin.onRoleAcquired(role)); + + resetSQLIndex(); + expectSQLs = Lists.newArrayList("DROP ROLE tmp"); + plugin.onRoleDeleted(role); + } + + @Test + public void testPermissionManagement() { + Role role = createRole("tmp"); + Group group = createGroup("tmp"); + User user = createUser("tmp"); + + resetSQLIndex(); + expectSQLs = + Lists.newArrayList("CREATE ROLE tmp", "GRANT ROLE tmp 
TO USER GRAVITINO_GROUP_tmp"); + plugin.onGrantedRolesToGroup(Lists.newArrayList(role), group); + + resetSQLIndex(); + expectSQLs = Lists.newArrayList("CREATE ROLE tmp", "GRANT ROLE tmp TO USER tmp"); + plugin.onGrantedRolesToUser(Lists.newArrayList(role), user); + + resetSQLIndex(); + expectSQLs = + Lists.newArrayList("CREATE ROLE tmp", "REVOKE ROLE tmp FROM USER GRAVITINO_GROUP_tmp"); + plugin.onRevokedRolesFromGroup(Lists.newArrayList(role), group); + + resetSQLIndex(); + expectSQLs = Lists.newArrayList("CREATE ROLE tmp", "REVOKE ROLE tmp FROM USER tmp"); + plugin.onRevokedRolesFromUser(Lists.newArrayList(role), user); + + // Test metalake object and different role change + resetSQLIndex(); + expectSQLs = Lists.newArrayList("CREATE ROLE tmp", "GRANT SELECT ON TABLE *.* TO ROLE tmp"); + SecurableObject metalakeObject = + SecurableObjects.ofMetalake("metalake", Lists.newArrayList(Privileges.SelectTable.allow())); + RoleChange roleChange = RoleChange.addSecurableObject("tmp", metalakeObject); + plugin.onRoleUpdated(role, roleChange); + + resetSQLIndex(); + expectSQLs = Lists.newArrayList("CREATE ROLE tmp", "REVOKE SELECT ON TABLE *.* FROM ROLE tmp"); + roleChange = RoleChange.removeSecurableObject("tmp", metalakeObject); + plugin.onRoleUpdated(role, roleChange); + + resetSQLIndex(); + expectSQLs = + Lists.newArrayList( + "CREATE ROLE tmp", + "REVOKE SELECT ON TABLE *.* FROM ROLE tmp", + "GRANT CREATE ON TABLE *.* TO ROLE tmp"); + SecurableObject newMetalakeObject = + SecurableObjects.ofMetalake("metalake", Lists.newArrayList(Privileges.CreateTable.allow())); + roleChange = RoleChange.updateSecurableObject("tmp", metalakeObject, newMetalakeObject); + plugin.onRoleUpdated(role, roleChange); + + // Test catalog object + resetSQLIndex(); + SecurableObject catalogObject = + SecurableObjects.ofCatalog("catalog", Lists.newArrayList(Privileges.SelectTable.allow())); + roleChange = RoleChange.addSecurableObject("tmp", catalogObject); + expectSQLs = 
Lists.newArrayList("CREATE ROLE tmp", "GRANT SELECT ON TABLE *.* TO ROLE tmp"); + plugin.onRoleUpdated(role, roleChange); + + // Test schema object + resetSQLIndex(); + SecurableObject schemaObject = + SecurableObjects.ofSchema( + catalogObject, "schema", Lists.newArrayList(Privileges.SelectTable.allow())); + roleChange = RoleChange.addSecurableObject("tmp", schemaObject); + expectSQLs = + Lists.newArrayList("CREATE ROLE tmp", "GRANT SELECT ON TABLE schema.* TO ROLE tmp"); + plugin.onRoleUpdated(role, roleChange); + + // Test table object + resetSQLIndex(); + SecurableObject tableObject = + SecurableObjects.ofTable( + schemaObject, "table", Lists.newArrayList(Privileges.SelectTable.allow())); + roleChange = RoleChange.addSecurableObject("tmp", tableObject); + expectSQLs = + Lists.newArrayList("CREATE ROLE tmp", "GRANT SELECT ON TABLE schema.table TO ROLE tmp"); + plugin.onRoleUpdated(role, roleChange); + } + + @Test + public void testOwnerManagement() { + + // Test metalake object + Owner owner = new TemporaryOwner("tmp", Owner.Type.USER); + MetadataObject metalakeObject = + MetadataObjects.of(null, "metalake", MetadataObject.Type.METALAKE); + expectSQLs = Lists.newArrayList("CREATE USER tmp"); + currentSQLIndex = 0; + expectTypes.add(MetadataObject.Type.SCHEMA); + expectObjectNames.add("*"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + + expectTypes.add(MetadataObject.Type.TABLE); + expectObjectNames.add("*.*"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + plugin.onOwnerSet(metalakeObject, null, owner); + + // clean up + cleanup(); + expectSQLs = Lists.newArrayList("CREATE USER tmp"); + + // Test catalog object + MetadataObject catalogObject = MetadataObjects.of(null, "catalog", MetadataObject.Type.CATALOG); + expectTypes.add(MetadataObject.Type.SCHEMA); + expectObjectNames.add("*"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + + expectTypes.add(MetadataObject.Type.TABLE); + 
expectObjectNames.add("*.*"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + plugin.onOwnerSet(catalogObject, null, owner); + + // clean up + cleanup(); + expectSQLs = Lists.newArrayList("CREATE USER tmp"); + + // Test schema object + MetadataObject schemaObject = + MetadataObjects.of("catalog", "schema", MetadataObject.Type.SCHEMA); + expectTypes.add(MetadataObject.Type.SCHEMA); + expectObjectNames.add("schema"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + + expectTypes.add(MetadataObject.Type.TABLE); + expectObjectNames.add("schema.*"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + plugin.onOwnerSet(schemaObject, null, owner); + + // clean up + cleanup(); + expectSQLs = Lists.newArrayList("CREATE USER tmp"); + + // Test table object + MetadataObject tableObject = + MetadataObjects.of( + Lists.newArrayList("catalog", "schema", "table"), MetadataObject.Type.TABLE); + + expectTypes.add(MetadataObject.Type.TABLE); + expectObjectNames.add("schema.table"); + expectPreOwners.add(Optional.empty()); + expectNewOwners.add(owner); + plugin.onOwnerSet(tableObject, null, owner); + } + + private static void resetSQLIndex() { + currentSQLIndex = 0; + } + + private static void cleanup() { + expectTypes.clear(); + expectObjectNames.clear(); + expectPreOwners.clear(); + expectNewOwners.clear(); + currentIndex = 0; + currentSQLIndex = 0; + } + + private static class TemporaryOwner implements Owner { + private final String name; + private final Type type; + + public TemporaryOwner(String name, Type type) { + this.name = name; + this.type = type; + } + + @Override + public String name() { + return name; + } + + @Override + public Type type() { + return type; + } + } + + private static Role createRole(String name) { + return RoleEntity.builder().withId(0L).withName(name).withAuditInfo(AuditInfo.EMPTY).build(); + } + + private static Group createGroup(String name) { + return 
GroupEntity.builder().withId(0L).withName(name).withAuditInfo(AuditInfo.EMPTY).build(); + } + + private static User createUser(String name) { + return UserEntity.builder().withId(0L).withName(name).withAuditInfo(AuditInfo.EMPTY).build(); + } +} diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java index cd27d9f12a2..6aae714a359 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java @@ -33,7 +33,7 @@ public String shortName() { } @Override - protected AuthorizationPlugin newPlugin( + public AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map properties) { Preconditions.checkArgument( properties.containsKey(RANGER_SERVICE_TYPE), @@ -41,9 +41,9 @@ protected AuthorizationPlugin newPlugin( String serviceType = properties.get(RANGER_SERVICE_TYPE).toUpperCase(); switch (serviceType) { case "HADOOPSQL": - return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, properties); + return new RangerAuthorizationHadoopSQLPlugin(metalake, properties); case "HDFS": - return RangerAuthorizationHDFSPlugin.getInstance(metalake, properties); + return new RangerAuthorizationHDFSPlugin(metalake, properties); default: throw new IllegalArgumentException("Unsupported service type: " + serviceType); } diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java index 16ce5bba4cb..9afa77880e9 100644 --- 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java @@ -52,24 +52,10 @@ public class RangerAuthorizationHDFSPlugin extends RangerAuthorizationPlugin { private static final Pattern pattern = Pattern.compile("^hdfs://[^/]*"); - private static volatile RangerAuthorizationHDFSPlugin instance = null; - - private RangerAuthorizationHDFSPlugin(String metalake, Map config) { + public RangerAuthorizationHDFSPlugin(String metalake, Map config) { super(metalake, config); } - public static synchronized RangerAuthorizationHDFSPlugin getInstance( - String metalake, Map config) { - if (instance == null) { - synchronized (RangerAuthorizationHadoopSQLPlugin.class) { - if (instance == null) { - instance = new RangerAuthorizationHDFSPlugin(metalake, config); - } - } - } - return instance; - } - @Override public Map> privilegesMappingRule() { return ImmutableMap.of( diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java index 0da5c105a4b..b8e078d086e 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java @@ -48,24 +48,11 @@ public class RangerAuthorizationHadoopSQLPlugin extends RangerAuthorizationPlugin { private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationHadoopSQLPlugin.class); - private static volatile RangerAuthorizationHadoopSQLPlugin instance = null; - private RangerAuthorizationHadoopSQLPlugin(String 
metalake, Map config) { + public RangerAuthorizationHadoopSQLPlugin(String metalake, Map config) { super(metalake, config); } - public static synchronized RangerAuthorizationHadoopSQLPlugin getInstance( - String metalake, Map config) { - if (instance == null) { - synchronized (RangerAuthorizationHadoopSQLPlugin.class) { - if (instance == null) { - instance = new RangerAuthorizationHadoopSQLPlugin(metalake, config); - } - } - } - return instance; - } - @Override /** Set the default mapping Gravitino privilege name to the Ranger rule */ public Map> privilegesMappingRule() { diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java index 9c30ee11906..7a91ad54bf0 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java @@ -226,7 +226,7 @@ public Boolean onRoleUpdated(Role role, RoleChange... changes) SecurableObject securableObject = ((RoleChange.AddSecurableObject) change).getSecurableObject(); if (!validAuthorizationOperation(Arrays.asList(securableObject))) { - return false; + return Boolean.FALSE; } List AuthorizationSecurableObjects = @@ -243,7 +243,7 @@ public Boolean onRoleUpdated(Role role, RoleChange... changes) SecurableObject securableObject = ((RoleChange.RemoveSecurableObject) change).getSecurableObject(); if (!validAuthorizationOperation(Arrays.asList(securableObject))) { - return false; + return Boolean.FALSE; } List AuthorizationSecurableObjects = @@ -260,12 +260,12 @@ public Boolean onRoleUpdated(Role role, RoleChange... 
changes) SecurableObject oldSecurableObject = ((RoleChange.UpdateSecurableObject) change).getSecurableObject(); if (!validAuthorizationOperation(Arrays.asList(oldSecurableObject))) { - return false; + return Boolean.FALSE; } SecurableObject newSecurableObject = ((RoleChange.UpdateSecurableObject) change).getNewSecurableObject(); if (!validAuthorizationOperation(Arrays.asList(newSecurableObject))) { - return false; + return Boolean.FALSE; } Preconditions.checkArgument( @@ -394,8 +394,7 @@ public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, Owner n onGroupAdded(groupEntity); } - List AuthorizationSecurableObjects = - translateOwner(metadataObject); + List rangerSecurableObjects = translateOwner(metadataObject); String ownerRoleName; switch (metadataObject.type()) { case METALAKE: @@ -426,14 +425,13 @@ public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, Owner n LOG.warn("Grant owner role: {} failed!", ownerRoleName, e); } - AuthorizationSecurableObjects.stream() + rangerSecurableObjects.stream() .forEach( - AuthorizationSecurableObject -> { - RangerPolicy policy = - rangerHelper.findManagedPolicy(AuthorizationSecurableObject); + rangerSecurableObject -> { + RangerPolicy policy = rangerHelper.findManagedPolicy(rangerSecurableObject); try { if (policy == null) { - policy = addOwnerRoleToNewPolicy(AuthorizationSecurableObject, ownerRoleName); + policy = addOwnerRoleToNewPolicy(rangerSecurableObject, ownerRoleName); rangerClient.createPolicy(policy); } else { rangerHelper.updatePolicyOwnerRole(policy, ownerRoleName); @@ -449,7 +447,7 @@ public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, Owner n case TABLE: case FILESET: // The schema and table use user/group to manage the owner - AuthorizationSecurableObjects.stream() + rangerSecurableObjects.stream() .forEach( AuthorizationSecurableObject -> { RangerPolicy policy = @@ -483,7 +481,7 @@ public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, 
Owner n * 2. Create a role in the Ranger if the role does not exist.
* 3. Add this user to the role.
* - * @param roles The roles to grant to the group. + * @param roles The roles to grant to the user. * @param user The user to grant the roles. */ @Override diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java index e1eacba1587..4062263222b 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java @@ -42,7 +42,7 @@ public class RangerAuthorizationHDFSPluginIT { @BeforeAll public static void setup() { - RangerITEnv.init(true); + RangerITEnv.init(RangerITEnv.currentFunName(), true); rangerAuthPlugin = RangerITEnv.rangerAuthHDFSPlugin; } diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationPluginIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationPluginIT.java index 74ddf078491..881d8f0ab44 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationPluginIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationPluginIT.java @@ -45,7 +45,7 @@ public class RangerAuthorizationPluginIT { @BeforeAll public static void setup() { - RangerITEnv.init(true); + RangerITEnv.init(RangerITEnv.currentFunName(), true); rangerAuthPlugin = RangerITEnv.rangerAuthHivePlugin; } diff --git 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java index de5641ffc7d..1fb9677d528 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java @@ -29,8 +29,10 @@ import java.util.List; import org.apache.commons.io.FileUtils; import org.apache.gravitino.Catalog; +import org.apache.gravitino.CatalogChange; import org.apache.gravitino.MetadataObject; import org.apache.gravitino.MetadataObjects; +import org.apache.gravitino.MetalakeChange; import org.apache.gravitino.NameIdentifier; import org.apache.gravitino.auth.AuthConstants; import org.apache.gravitino.authorization.Owner; @@ -169,12 +171,18 @@ protected void createMetalake() { metalake = loadMetalake; } - protected static void waitForUpdatingPolicies() throws InterruptedException { + public abstract void createCatalog(); + + protected static void waitForUpdatingPolicies() { // After Ranger authorization, Must wait a period of time for the Ranger Spark plugin to update // the policy Sleep time must be greater than the policy update interval // (ranger.plugin.spark.policy.pollIntervalMs) in the // `resources/ranger-spark-security.xml.template` - Thread.sleep(1000L); + try { + Thread.sleep(1000L); + } catch (InterruptedException e) { + LOG.error("Failed to sleep", e); + } } protected abstract void checkTableAllPrivilegesExceptForCreating(); @@ -197,8 +205,22 @@ protected static void waitForUpdatingPolicies() throws InterruptedException { protected abstract void testAlterTable(); + // ISSUE-5947: can't rename a catalog or a metalake + @Test + void testRenameMetalakeOrCatalog() { + 
Assertions.assertDoesNotThrow( + () -> client.alterMetalake(metalakeName, MetalakeChange.rename("new_name"))); + Assertions.assertDoesNotThrow( + () -> client.alterMetalake("new_name", MetalakeChange.rename(metalakeName))); + + Assertions.assertDoesNotThrow( + () -> metalake.alterCatalog(catalogName, CatalogChange.rename("new_name"))); + Assertions.assertDoesNotThrow( + () -> metalake.alterCatalog("new_name", CatalogChange.rename(catalogName))); + } + @Test - void testCreateSchema() throws InterruptedException { + protected void testCreateSchema() throws InterruptedException { // Choose a catalog useCatalog(); diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java index 56f09781587..d8024afcc11 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java @@ -95,7 +95,7 @@ public void startIntegrationTest() throws Exception { registerCustomConfigs(configs); super.startIntegrationTest(); - RangerITEnv.init(false); + RangerITEnv.init(metalakeName, false); RangerITEnv.startHiveRangerContainer(); RANGER_ADMIN_URL = diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java index baec9434c79..363f8f0b3a1 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java +++ 
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java @@ -31,6 +31,7 @@ import org.apache.gravitino.auth.AuthenticatorType; import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties; import org.apache.gravitino.catalog.hive.HiveConstants; +import org.apache.gravitino.exceptions.UserAlreadyExistsException; import org.apache.gravitino.integration.test.container.HiveContainer; import org.apache.gravitino.integration.test.container.RangerContainer; import org.apache.gravitino.integration.test.util.GravitinoITUtils; @@ -63,7 +64,7 @@ public void startIntegrationTest() throws Exception { registerCustomConfigs(configs); super.startIntegrationTest(); - RangerITEnv.init(true); + RangerITEnv.init(RangerBaseE2EIT.metalakeName, true); RangerITEnv.startHiveRangerContainer(); RANGER_ADMIN_URL = @@ -102,7 +103,11 @@ public void startIntegrationTest() throws Exception { createCatalog(); RangerITEnv.cleanup(); - metalake.addUser(System.getenv(HADOOP_USER_NAME)); + try { + metalake.addUser(System.getenv(HADOOP_USER_NAME)); + } catch (UserAlreadyExistsException e) { + LOG.error("Failed to add user: {}", System.getenv(HADOOP_USER_NAME), e); + } } @AfterAll @@ -166,7 +171,8 @@ protected void testAlterTable() { sparkSession.sql(SQL_ALTER_TABLE); } - private static void createCatalog() { + @Override + public void createCatalog() { Map properties = ImmutableMap.of( HiveConstants.METASTORE_URIS, diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java index 9c45a21099e..9545f243dd3 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java +++ 
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java @@ -80,7 +80,7 @@ public class RangerHiveIT { @BeforeAll public static void setup() { - RangerITEnv.init(true); + RangerITEnv.init(RangerITEnv.currentFunName(), true); rangerAuthHivePlugin = RangerITEnv.rangerAuthHivePlugin; rangerHelper = RangerITEnv.rangerHelper; diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java index b3be410ea03..2efc1e9dd60 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java @@ -59,12 +59,12 @@ public class RangerITEnv { private static final Logger LOG = LoggerFactory.getLogger(RangerITEnv.class); protected static final String RANGER_TRINO_REPO_NAME = "trinoDev"; private static final String RANGER_TRINO_TYPE = "trino"; - protected static final String RANGER_HIVE_REPO_NAME = "hiveDev"; + public static final String RANGER_HIVE_REPO_NAME = "hiveDev"; private static final String RANGER_HIVE_TYPE = "hive"; - protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev"; + public static final String RANGER_HDFS_REPO_NAME = "hdfsDev"; private static final String RANGER_HDFS_TYPE = "hdfs"; protected static RangerClient rangerClient; - protected static final String HADOOP_USER_NAME = "gravitino"; + public static final String HADOOP_USER_NAME = "gravitino"; private static volatile boolean initRangerService = false; private static final ContainerSuite containerSuite = ContainerSuite.getInstance(); @@ -90,13 +90,13 @@ public class RangerITEnv { protected static RangerHelper rangerHDFSHelper; - public 
static void init(boolean allowAnyoneAccessHDFS) { + public static void init(String metalakeName, boolean allowAnyoneAccessHDFS) { containerSuite.startRangerContainer(); rangerClient = containerSuite.getRangerContainer().rangerClient; rangerAuthHivePlugin = - RangerAuthorizationHadoopSQLPlugin.getInstance( - "metalake", + new RangerAuthorizationHadoopSQLPlugin( + metalakeName, ImmutableMap.of( RangerAuthorizationProperties.RANGER_ADMIN_URL, String.format( @@ -116,8 +116,8 @@ public static void init(boolean allowAnyoneAccessHDFS) { RangerAuthorizationHDFSPlugin spyRangerAuthorizationHDFSPlugin = Mockito.spy( - RangerAuthorizationHDFSPlugin.getInstance( - "metalake", + new RangerAuthorizationHDFSPlugin( + metalakeName, ImmutableMap.of( RangerAuthorizationProperties.RANGER_ADMIN_URL, String.format( @@ -175,7 +175,7 @@ public static void cleanup() { } } - static void startHiveRangerContainer() { + public static void startHiveRangerContainer() { containerSuite.startHiveRangerContainer( new HashMap<>( ImmutableMap.of( diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java index d8bd70c6470..8f6f769504a 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java @@ -67,7 +67,7 @@ public void startIntegrationTest() throws Exception { registerCustomConfigs(configs); super.startIntegrationTest(); - RangerITEnv.init(true); + RangerITEnv.init(RangerBaseE2EIT.metalakeName, true); RangerITEnv.startHiveRangerContainer(); RANGER_ADMIN_URL = @@ -163,7 +163,8 @@ protected void testAlterTable() { sparkSession.sql(SQL_ALTER_TABLE_BACK); } - 
private static void createCatalog() { + @Override + public void createCatalog() { Map properties = new HashMap<>(); properties.put(IcebergConstants.URI, HIVE_METASTORE_URIS); properties.put(IcebergConstants.CATALOG_BACKEND, "hive"); diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java index 79d1eb1875d..2773610048e 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java @@ -66,7 +66,7 @@ public void startIntegrationTest() throws Exception { registerCustomConfigs(configs); super.startIntegrationTest(); - RangerITEnv.init(true); + RangerITEnv.init(RangerBaseE2EIT.metalakeName, true); RangerITEnv.startHiveRangerContainer(); RANGER_ADMIN_URL = @@ -179,7 +179,8 @@ protected void testAlterTable() { sparkSession.sql(SQL_ALTER_TABLE_BACK); } - private static void createCatalog() { + @Override + public void createCatalog() { Map properties = ImmutableMap.of( "uri", diff --git a/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/ADLSTokenProvider.java b/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/ADLSTokenProvider.java index e2ee3ed82a3..c2b684acbde 100644 --- a/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/ADLSTokenProvider.java +++ b/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/ADLSTokenProvider.java @@ -38,7 +38,7 @@ import org.apache.gravitino.credential.CredentialContext; import org.apache.gravitino.credential.CredentialProvider; import org.apache.gravitino.credential.PathBasedCredentialContext; -import 
org.apache.gravitino.credential.config.ADLSCredentialConfig; +import org.apache.gravitino.credential.config.AzureCredentialConfig; /** Generates ADLS token to access ADLS data. */ public class ADLSTokenProvider implements CredentialProvider { @@ -51,14 +51,14 @@ public class ADLSTokenProvider implements CredentialProvider { @Override public void initialize(Map properties) { - ADLSCredentialConfig adlsCredentialConfig = new ADLSCredentialConfig(properties); - this.storageAccountName = adlsCredentialConfig.storageAccountName(); - this.tenantId = adlsCredentialConfig.tenantId(); - this.clientId = adlsCredentialConfig.clientId(); - this.clientSecret = adlsCredentialConfig.clientSecret(); + AzureCredentialConfig azureCredentialConfig = new AzureCredentialConfig(properties); + this.storageAccountName = azureCredentialConfig.storageAccountName(); + this.tenantId = azureCredentialConfig.tenantId(); + this.clientId = azureCredentialConfig.clientId(); + this.clientSecret = azureCredentialConfig.clientSecret(); this.endpoint = String.format("https://%s.%s", storageAccountName, ADLSTokenCredential.ADLS_DOMAIN); - this.tokenExpireSecs = adlsCredentialConfig.tokenExpireInSecs(); + this.tokenExpireSecs = azureCredentialConfig.adlsTokenExpireInSecs(); } @Override diff --git a/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/AzureAccountKeyProvider.java b/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/AzureAccountKeyProvider.java new file mode 100644 index 00000000000..726c4f2d996 --- /dev/null +++ b/bundles/azure-bundle/src/main/java/org/apache/gravitino/abs/credential/AzureAccountKeyProvider.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.gravitino.abs.credential; + +import java.util.Map; +import org.apache.gravitino.credential.AzureAccountKeyCredential; +import org.apache.gravitino.credential.Credential; +import org.apache.gravitino.credential.CredentialConstants; +import org.apache.gravitino.credential.CredentialContext; +import org.apache.gravitino.credential.CredentialProvider; +import org.apache.gravitino.credential.config.AzureCredentialConfig; + +/** Generates Azure account key to access data. 
*/ +public class AzureAccountKeyProvider implements CredentialProvider { + private String accountName; + private String accountKey; + + @Override + public void initialize(Map properties) { + AzureCredentialConfig azureCredentialConfig = new AzureCredentialConfig(properties); + this.accountName = azureCredentialConfig.storageAccountName(); + this.accountKey = azureCredentialConfig.storageAccountKey(); + } + + @Override + public void close() {} + + @Override + public String credentialType() { + return CredentialConstants.AZURE_ACCOUNT_KEY_CREDENTIAL_PROVIDER_TYPE; + } + + @Override + public Credential getCredential(CredentialContext context) { + return new AzureAccountKeyCredential(accountName, accountKey); + } +} diff --git a/bundles/azure-bundle/src/main/resources/META-INF/services/org.apache.gravitino.credential.CredentialProvider b/bundles/azure-bundle/src/main/resources/META-INF/services/org.apache.gravitino.credential.CredentialProvider index fb53efffa63..4c7e7982cb1 100644 --- a/bundles/azure-bundle/src/main/resources/META-INF/services/org.apache.gravitino.credential.CredentialProvider +++ b/bundles/azure-bundle/src/main/resources/META-INF/services/org.apache.gravitino.credential.CredentialProvider @@ -16,4 +16,5 @@ # specific language governing permissions and limitations # under the License. 
# -org.apache.gravitino.abs.credential.ADLSTokenProvider \ No newline at end of file +org.apache.gravitino.abs.credential.ADLSTokenProvider +org.apache.gravitino.abs.credential.AzureAccountKeyProvider \ No newline at end of file diff --git a/catalogs/catalog-common/src/main/java/org/apache/gravitino/credential/CredentialConstants.java b/catalogs/catalog-common/src/main/java/org/apache/gravitino/credential/CredentialConstants.java index 7dd74d08484..29f9241c890 100644 --- a/catalogs/catalog-common/src/main/java/org/apache/gravitino/credential/CredentialConstants.java +++ b/catalogs/catalog-common/src/main/java/org/apache/gravitino/credential/CredentialConstants.java @@ -32,5 +32,7 @@ public class CredentialConstants { public static final String ADLS_TOKEN_CREDENTIAL_PROVIDER_TYPE = "adls-token"; public static final String ADLS_TOKEN_EXPIRE_IN_SECS = "adls-token-expire-in-secs"; + public static final String AZURE_ACCOUNT_KEY_CREDENTIAL_PROVIDER_TYPE = "azure-account-key"; + private CredentialConstants() {} } diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/org/apache/gravitino/catalog/lakehouse/iceberg/IcebergCatalogPropertiesMetadata.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/org/apache/gravitino/catalog/lakehouse/iceberg/IcebergCatalogPropertiesMetadata.java index 6d61a6220a3..9e1c184cad9 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/org/apache/gravitino/catalog/lakehouse/iceberg/IcebergCatalogPropertiesMetadata.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/org/apache/gravitino/catalog/lakehouse/iceberg/IcebergCatalogPropertiesMetadata.java @@ -33,6 +33,7 @@ import org.apache.gravitino.iceberg.common.IcebergCatalogBackend; import org.apache.gravitino.iceberg.common.authentication.AuthenticationConfig; import org.apache.gravitino.iceberg.common.authentication.kerberos.KerberosConfig; +import org.apache.gravitino.storage.AzureProperties; import org.apache.gravitino.storage.OSSProperties; import 
org.apache.gravitino.storage.S3Properties; @@ -74,10 +75,11 @@ public class IcebergCatalogPropertiesMetadata extends BaseCatalogPropertiesMetad false /* reserved */), stringRequiredPropertyEntry( URI, "Iceberg catalog uri config", false /* immutable */, false /* hidden */), - stringRequiredPropertyEntry( + stringOptionalPropertyEntry( WAREHOUSE, "Iceberg catalog warehouse config", false /* immutable */, + null, /* defaultValue */ false /* hidden */), stringOptionalPropertyEntry( IcebergConstants.IO_IMPL, @@ -90,25 +92,37 @@ public class IcebergCatalogPropertiesMetadata extends BaseCatalogPropertiesMetad "s3 access key ID", false /* immutable */, null /* defaultValue */, - true /* hidden */), + false /* hidden */), stringOptionalPropertyEntry( S3Properties.GRAVITINO_S3_SECRET_ACCESS_KEY, "s3 secret access key", false /* immutable */, null /* defaultValue */, - true /* hidden */), + false /* hidden */), stringOptionalPropertyEntry( OSSProperties.GRAVITINO_OSS_ACCESS_KEY_ID, "OSS access key ID", false /* immutable */, null /* defaultValue */, - true /* hidden */), + false /* hidden */), stringOptionalPropertyEntry( OSSProperties.GRAVITINO_OSS_ACCESS_KEY_SECRET, "OSS access key secret", false /* immutable */, null /* defaultValue */, - true /* hidden */)); + false /* hidden */), + stringOptionalPropertyEntry( + AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, + "Azure storage account name", + false /* immutable */, + null /* defaultValue */, + false /* hidden */), + stringOptionalPropertyEntry( + AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, + "Azure storage account key", + false /* immutable */, + null /* defaultValue */, + false /* hidden */)); HashMap> result = Maps.newHashMap(); result.putAll(Maps.uniqueIndex(propertyEntries, PropertyEntry::getName)); result.putAll(KerberosConfig.KERBEROS_PROPERTY_ENTRIES); diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergCatalog.java 
b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergCatalog.java index 5c657197231..8ff70d39854 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergCatalog.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergCatalog.java @@ -146,4 +146,51 @@ void testCatalogProperty() { throwable.getMessage().contains(IcebergCatalogPropertiesMetadata.CATALOG_BACKEND)); } } + + @Test + void testCatalogInstanciation() { + AuditInfo auditInfo = + AuditInfo.builder().withCreator("creator").withCreateTime(Instant.now()).build(); + + CatalogEntity entity = + CatalogEntity.builder() + .withId(1L) + .withName("catalog") + .withNamespace(Namespace.of("metalake")) + .withType(IcebergCatalog.Type.RELATIONAL) + .withProvider("iceberg") + .withAuditInfo(auditInfo) + .build(); + + Map conf = Maps.newHashMap(); + + try (IcebergCatalogOperations ops = new IcebergCatalogOperations()) { + ops.initialize(conf, entity.toCatalogInfo(), ICEBERG_PROPERTIES_METADATA); + Map map1 = Maps.newHashMap(); + map1.put(IcebergCatalogPropertiesMetadata.CATALOG_BACKEND, "test"); + PropertiesMetadata metadata = ICEBERG_PROPERTIES_METADATA.catalogPropertiesMetadata(); + Assertions.assertThrows( + IllegalArgumentException.class, + () -> { + PropertiesMetadataHelpers.validatePropertyForCreate(metadata, map1); + }); + + Map map2 = Maps.newHashMap(); + map2.put(IcebergCatalogPropertiesMetadata.CATALOG_BACKEND, "rest"); + map2.put(IcebergCatalogPropertiesMetadata.URI, "127.0.0.1"); + Assertions.assertDoesNotThrow( + () -> { + PropertiesMetadataHelpers.validatePropertyForCreate(metadata, map2); + }); + + Map map3 = Maps.newHashMap(); + Throwable throwable = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> PropertiesMetadataHelpers.validatePropertyForCreate(metadata, map3)); + + Assertions.assertTrue( + 
throwable.getMessage().contains(IcebergCatalogPropertiesMetadata.CATALOG_BACKEND)); + } + } } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/ErrorMessages.java b/clients/cli/src/main/java/org/apache/gravitino/cli/ErrorMessages.java index 323f0fc2aed..3423cee07f7 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/ErrorMessages.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/ErrorMessages.java @@ -30,6 +30,8 @@ public class ErrorMessages { public static final String UNKNOWN_TABLE = "Unknown table name."; public static final String MALFORMED_NAME = "Malformed entity name."; public static final String MISSING_NAME = "Missing --name option."; + public static final String MISSING_GROUP = "Missing --group option."; + public static final String MISSING_USER = "Missing --user option."; public static final String METALAKE_EXISTS = "Metalake already exists."; public static final String CATALOG_EXISTS = "Catalog already exists."; public static final String SCHEMA_EXISTS = "Schema already exists."; diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/FullName.java b/clients/cli/src/main/java/org/apache/gravitino/cli/FullName.java index 46a3bb92dce..a2be2e52c2d 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/FullName.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/FullName.java @@ -29,6 +29,8 @@ public class FullName { private final CommandLine line; private String metalakeEnv; private boolean matalakeSet = false; + private boolean hasDisplayedMissingNameInfo = true; + private boolean hasDisplayedMalformedInfo = true; /** * Constructor for the {@code FullName} class. 
@@ -159,14 +161,14 @@ public String getNamePart(int position) { String[] names = line.getOptionValue(GravitinoOptions.NAME).split("\\."); if (names.length <= position) { - System.err.println(ErrorMessages.MALFORMED_NAME); + showMalformedInfo(); return null; } return names[position]; } - System.err.println(ErrorMessages.MISSING_NAME); + showMissingNameInfo(); return null; } @@ -224,4 +226,18 @@ public boolean hasTableName() { public boolean hasColumnName() { return hasNamePart(4); } + + private void showMissingNameInfo() { + if (hasDisplayedMissingNameInfo) { + System.err.println(ErrorMessages.MISSING_NAME); + hasDisplayedMissingNameInfo = false; + } + } + + private void showMalformedInfo() { + if (hasDisplayedMalformedInfo) { + System.err.println(ErrorMessages.MALFORMED_NAME); + hasDisplayedMalformedInfo = false; + } + } } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoCommandLine.java b/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoCommandLine.java index 8b7e65c32d2..7c8539ba1c7 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoCommandLine.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoCommandLine.java @@ -21,6 +21,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -212,6 +213,18 @@ private void handleMetalakeCommand() { break; case CommandActions.UPDATE: + if (line.hasOption(GravitinoOptions.ENABLE) && line.hasOption(GravitinoOptions.DISABLE)) { + System.err.println("Unable to enable and disable at the same time"); + Main.exit(-1); + } + if (line.hasOption(GravitinoOptions.ENABLE)) { + boolean enableAllCatalogs = line.hasOption(GravitinoOptions.ALL); + newMetalakeEnable(url, ignore, metalake, enableAllCatalogs).handle(); + } + if (line.hasOption(GravitinoOptions.DISABLE)) { + newMetalakeDisable(url, ignore, 
metalake).handle(); + } + if (line.hasOption(GravitinoOptions.COMMENT)) { comment = line.getOptionValue(GravitinoOptions.COMMENT); newUpdateMetalakeComment(url, ignore, metalake, comment).handle(); @@ -289,6 +302,18 @@ private void handleCatalogCommand() { break; case CommandActions.UPDATE: + if (line.hasOption(GravitinoOptions.ENABLE) && line.hasOption(GravitinoOptions.DISABLE)) { + System.err.println("Unable to enable and disable at the same time"); + Main.exit(-1); + } + if (line.hasOption(GravitinoOptions.ENABLE)) { + boolean enableMetalake = line.hasOption(GravitinoOptions.ALL); + newCatalogEnable(url, ignore, metalake, catalog, enableMetalake).handle(); + } + if (line.hasOption(GravitinoOptions.DISABLE)) { + newCatalogDisable(url, ignore, metalake, catalog).handle(); + } + if (line.hasOption(GravitinoOptions.COMMENT)) { String updateComment = line.getOptionValue(GravitinoOptions.COMMENT); newUpdateCatalogComment(url, ignore, metalake, catalog, updateComment).handle(); @@ -318,14 +343,29 @@ private void handleSchemaCommand() { String catalog = name.getCatalogName(); Command.setAuthenticationMode(auth, userName); + List missingEntities = Lists.newArrayList(); + if (metalake == null) missingEntities.add(CommandEntities.METALAKE); + if (catalog == null) missingEntities.add(CommandEntities.CATALOG); // Handle the CommandActions.LIST action separately as it doesn't use `schema` if (CommandActions.LIST.equals(command)) { + if (!missingEntities.isEmpty()) { + System.err.println("Missing required argument(s): " + COMMA_JOINER.join(missingEntities)); + Main.exit(-1); + } newListSchema(url, ignore, metalake, catalog).handle(); return; } String schema = name.getSchemaName(); + if (schema == null) { + missingEntities.add(CommandEntities.SCHEMA); + } + + if (!missingEntities.isEmpty()) { + System.err.println("Missing required argument(s): " + COMMA_JOINER.join(missingEntities)); + Main.exit(-1); + } switch (command) { case CommandActions.DETAILS: @@ -381,27 +421,33 @@ 
private void handleTableCommand() { String schema = name.getSchemaName(); Command.setAuthenticationMode(auth, userName); + List missingEntities = + Stream.of( + catalog == null ? CommandEntities.CATALOG : null, + schema == null ? CommandEntities.SCHEMA : null) + .filter(Objects::nonNull) + .collect(Collectors.toList()); // Handle CommandActions.LIST action separately as it doesn't require the `table` if (CommandActions.LIST.equals(command)) { - List missingEntities = - Stream.of( - metalake == null ? CommandEntities.METALAKE : null, - catalog == null ? CommandEntities.CATALOG : null, - schema == null ? CommandEntities.SCHEMA : null) - .filter(Objects::nonNull) - .collect(Collectors.toList()); if (!missingEntities.isEmpty()) { System.err.println( "Missing required argument(s): " + Joiner.on(", ").join(missingEntities)); Main.exit(-1); } - newListTables(url, ignore, metalake, catalog, schema).handle(); return; } String table = name.getTableName(); + if (table == null) { + missingEntities.add(CommandEntities.TABLE); + } + + if (!missingEntities.isEmpty()) { + System.err.println("Missing required argument(s): " + Joiner.on(", ").join(missingEntities)); + Main.exit(-1); + } switch (command) { case CommandActions.DETAILS: @@ -479,6 +525,11 @@ protected void handleUserCommand() { Command.setAuthenticationMode(auth, userName); + if (user == null && !CommandActions.LIST.equals(command)) { + System.err.println(ErrorMessages.MISSING_USER); + return; + } + switch (command) { case CommandActions.DETAILS: if (line.hasOption(GravitinoOptions.AUDIT)) { @@ -535,6 +586,11 @@ protected void handleGroupCommand() { Command.setAuthenticationMode(auth, userName); + if (group == null && !CommandActions.LIST.equals(command)) { + System.err.println(ErrorMessages.MISSING_GROUP); + return; + } + switch (command) { case CommandActions.DETAILS: if (line.hasOption(GravitinoOptions.AUDIT)) { @@ -745,6 +801,9 @@ private void handleColumnCommand() { case CommandActions.DETAILS: if 
(line.hasOption(GravitinoOptions.AUDIT)) { newColumnAudit(url, ignore, metalake, catalog, schema, table, column).handle(); + } else { + System.err.println(ErrorMessages.UNSUPPORTED_ACTION); + Main.exit(-1); } break; diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoOptions.java b/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoOptions.java index a42591026a6..657566036dc 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoOptions.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/GravitinoOptions.java @@ -59,6 +59,9 @@ public class GravitinoOptions { public static final String USER = "user"; public static final String VALUE = "value"; public static final String VERSION = "version"; + public static final String ALL = "all"; + public static final String ENABLE = "enable"; + public static final String DISABLE = "disable"; /** * Builds and returns the CLI options for Gravitino. @@ -84,6 +87,8 @@ public Options options() { options.addOption(createSimpleOption(PARTITION, "display partition information")); options.addOption(createSimpleOption("o", OWNER, "display entity owner")); options.addOption(createSimpleOption(null, SORTORDER, "display sortorder information")); + options.addOption(createSimpleOption(null, ENABLE, "enable entities")); + options.addOption(createSimpleOption(null, DISABLE, "disable entities")); // Create/update options options.addOption(createArgOption(RENAME, "new entity name")); @@ -102,6 +107,7 @@ public Options options() { options.addOption(createArgOption(DEFAULT, "default column value")); options.addOption(createSimpleOption("o", OWNER, "display entity owner")); options.addOption(createArgOption(COLUMNFILE, "CSV file describing columns")); + options.addOption(createSimpleOption(null, ALL, "all operation for --enable")); // Options that support multiple values options.addOption(createArgsOption("p", PROPERTIES, "property name/value pairs")); diff --git 
a/clients/cli/src/main/java/org/apache/gravitino/cli/TestableCommandLine.java b/clients/cli/src/main/java/org/apache/gravitino/cli/TestableCommandLine.java index 41909f7209e..effe0da1f10 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/TestableCommandLine.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/TestableCommandLine.java @@ -26,6 +26,8 @@ import org.apache.gravitino.cli.commands.AddRoleToUser; import org.apache.gravitino.cli.commands.CatalogAudit; import org.apache.gravitino.cli.commands.CatalogDetails; +import org.apache.gravitino.cli.commands.CatalogDisable; +import org.apache.gravitino.cli.commands.CatalogEnable; import org.apache.gravitino.cli.commands.ClientVersion; import org.apache.gravitino.cli.commands.ColumnAudit; import org.apache.gravitino.cli.commands.CreateCatalog; @@ -75,6 +77,8 @@ import org.apache.gravitino.cli.commands.ListUsers; import org.apache.gravitino.cli.commands.MetalakeAudit; import org.apache.gravitino.cli.commands.MetalakeDetails; +import org.apache.gravitino.cli.commands.MetalakeDisable; +import org.apache.gravitino.cli.commands.MetalakeEnable; import org.apache.gravitino.cli.commands.OwnerDetails; import org.apache.gravitino.cli.commands.RemoveAllTags; import org.apache.gravitino.cli.commands.RemoveCatalogProperty; @@ -884,4 +888,23 @@ protected RevokePrivilegesFromRole newRevokePrivilegesFromRole( String[] privileges) { return new RevokePrivilegesFromRole(url, ignore, metalake, role, entity, privileges); } + + protected MetalakeEnable newMetalakeEnable( + String url, boolean ignore, String metalake, boolean enableAllCatalogs) { + return new MetalakeEnable(url, ignore, metalake, enableAllCatalogs); + } + + protected MetalakeDisable newMetalakeDisable(String url, boolean ignore, String metalake) { + return new MetalakeDisable(url, ignore, metalake); + } + + protected CatalogEnable newCatalogEnable( + String url, boolean ignore, String metalake, String catalog, boolean enableMetalake) { + return new 
CatalogEnable(url, ignore, metalake, catalog, enableMetalake); + } + + protected CatalogDisable newCatalogDisable( + String url, boolean ignore, String metalake, String catalog) { + return new CatalogDisable(url, ignore, metalake, catalog); + } } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/CatalogDisable.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/CatalogDisable.java new file mode 100644 index 00000000000..620a4291eea --- /dev/null +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/CatalogDisable.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.cli.commands; + +import org.apache.gravitino.cli.ErrorMessages; +import org.apache.gravitino.client.GravitinoClient; +import org.apache.gravitino.exceptions.NoSuchCatalogException; +import org.apache.gravitino.exceptions.NoSuchMetalakeException; + +/** Disable catalog. */ +public class CatalogDisable extends Command { + + private final String metalake; + private final String catalog; + + /** + * Disable catalog + * + * @param url The URL of the Gravitino server. + * @param ignoreVersions If true don't check the client/server versions match. 
+ * @param metalake The name of the metalake. + * @param catalog The name of the catalog. + */ + public CatalogDisable(String url, boolean ignoreVersions, String metalake, String catalog) { + super(url, ignoreVersions); + this.metalake = metalake; + this.catalog = catalog; + } + + /** Disable catalog. */ + @Override + public void handle() { + try { + GravitinoClient client = buildClient(metalake); + client.disableCatalog(catalog); + } catch (NoSuchMetalakeException noSuchMetalakeException) { + exitWithError(ErrorMessages.UNKNOWN_METALAKE); + } catch (NoSuchCatalogException noSuchCatalogException) { + exitWithError(ErrorMessages.UNKNOWN_CATALOG); + } catch (Exception exp) { + exitWithError(exp.getMessage()); + } + + System.out.println(metalake + "." + catalog + " has been disabled."); + } +} diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/CatalogEnable.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/CatalogEnable.java new file mode 100644 index 00000000000..8646baee292 --- /dev/null +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/CatalogEnable.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.cli.commands; + +import org.apache.gravitino.cli.ErrorMessages; +import org.apache.gravitino.client.GravitinoAdminClient; +import org.apache.gravitino.client.GravitinoClient; +import org.apache.gravitino.exceptions.MetalakeNotInUseException; +import org.apache.gravitino.exceptions.NoSuchCatalogException; +import org.apache.gravitino.exceptions.NoSuchMetalakeException; + +/** Enable catalog. */ +public class CatalogEnable extends Command { + private final String metalake; + private final String catalog; + private final boolean enableMetalake; + + /** + * Enable catalog + * + * @param url The URL of the Gravitino server. + * @param ignoreVersions If true don't check the client/server versions match. + * @param metalake The name of the metalake. + * @param catalog The name of the catalog. + * @param enableMetalake Whether to enable its metalake + */ + public CatalogEnable( + String url, boolean ignoreVersions, String metalake, String catalog, boolean enableMetalake) { + super(url, ignoreVersions); + this.metalake = metalake; + this.catalog = catalog; + this.enableMetalake = enableMetalake; + } + + /** Enable catalog. */ + @Override + public void handle() { + try { + if (enableMetalake) { + GravitinoAdminClient adminClient = buildAdminClient(); + adminClient.enableMetalake(metalake); + } + GravitinoClient client = buildClient(metalake); + client.enableCatalog(catalog); + } catch (NoSuchMetalakeException noSuchMetalakeException) { + exitWithError(ErrorMessages.UNKNOWN_METALAKE); + } catch (NoSuchCatalogException noSuchCatalogException) { + exitWithError(ErrorMessages.UNKNOWN_CATALOG); + } catch (MetalakeNotInUseException notInUseException) { + exitWithError( + metalake + " not in use. please use --all option, or enable metalake first"); + } catch (Exception exp) { + exitWithError(exp.getMessage()); + } + + System.out.println(metalake + "." 
+ catalog + " has been enabled."); + } +} diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteCatalog.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteCatalog.java index 6c5fbaee97d..6aa8e5ad904 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteCatalog.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteCatalog.java @@ -22,6 +22,7 @@ import org.apache.gravitino.cli.AreYouSure; import org.apache.gravitino.cli.ErrorMessages; import org.apache.gravitino.client.GravitinoClient; +import org.apache.gravitino.exceptions.CatalogInUseException; import org.apache.gravitino.exceptions.NoSuchCatalogException; import org.apache.gravitino.exceptions.NoSuchMetalakeException; @@ -64,6 +65,8 @@ public void handle() { exitWithError(ErrorMessages.UNKNOWN_METALAKE); } catch (NoSuchCatalogException err) { exitWithError(ErrorMessages.UNKNOWN_CATALOG); + } catch (CatalogInUseException catalogInUseException) { + System.err.println(catalog + " in use, please disable it first."); } catch (Exception exp) { exitWithError(exp.getMessage()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteMetalake.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteMetalake.java index 386dde92130..e88ae41486f 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteMetalake.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/DeleteMetalake.java @@ -22,6 +22,7 @@ import org.apache.gravitino.cli.AreYouSure; import org.apache.gravitino.cli.ErrorMessages; import org.apache.gravitino.client.GravitinoAdminClient; +import org.apache.gravitino.exceptions.MetalakeInUseException; import org.apache.gravitino.exceptions.NoSuchMetalakeException; public class DeleteMetalake extends Command { @@ -56,6 +57,8 @@ public void handle() { deleted = client.dropMetalake(metalake); } catch (NoSuchMetalakeException err) { 
exitWithError(ErrorMessages.UNKNOWN_METALAKE); + } catch (MetalakeInUseException inUseException) { + System.err.println(metalake + " in use, please disable it first."); } catch (Exception exp) { exitWithError(exp.getMessage()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/GroupDetails.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/GroupDetails.java index 4df87b5fa8e..7217d5ad3bd 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/GroupDetails.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/GroupDetails.java @@ -60,7 +60,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = String.join(",", roles); + String all = roles.isEmpty() ? "The group has no roles." : String.join(",", roles); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListAllTags.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListAllTags.java index fa6c74c7afa..cded12808d9 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListAllTags.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListAllTags.java @@ -53,7 +53,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = String.join(",", tags); + String all = tags.length == 0 ? "No tags exist." 
: String.join(",", tags); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListCatalogs.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListCatalogs.java index e6aaf811ec9..eb9c960b14e 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListCatalogs.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListCatalogs.java @@ -49,7 +49,11 @@ public void handle() { try { GravitinoClient client = buildClient(metalake); catalogs = client.listCatalogsInfo(); - output(catalogs); + if (catalogs.length == 0) { + System.out.println("No catalogs exist."); + } else { + output(catalogs); + } } catch (NoSuchMetalakeException err) { exitWithError(ErrorMessages.UNKNOWN_METALAKE); } catch (Exception exp) { diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListFilesets.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListFilesets.java index 34839f683c5..d00ba3e6ba5 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListFilesets.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListFilesets.java @@ -71,7 +71,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = Joiner.on(",").join(filesets); + String all = filesets.length == 0 ? "No filesets exist." : Joiner.on(",").join(filesets); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListGroups.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListGroups.java index fd9009a755a..a517b4daed8 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListGroups.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListGroups.java @@ -53,7 +53,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = String.join(",", groups); + String all = groups.length == 0 ? "No groups exist." 
: String.join(",", groups); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListMetalakes.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListMetalakes.java index ee5ac81d646..b2388e5cd3d 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListMetalakes.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListMetalakes.java @@ -43,7 +43,11 @@ public void handle() { try { GravitinoAdminClient client = buildAdminClient(); metalakes = client.listMetalakes(); - output(metalakes); + if (metalakes.length == 0) { + System.out.println("No metalakes exist."); + } else { + output(metalakes); + } } catch (Exception exp) { exitWithError(exp.getMessage()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListRoles.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListRoles.java index a7bb1cd20f7..2ecb35bd093 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListRoles.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListRoles.java @@ -53,7 +53,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = String.join(",", roles); + String all = roles.length == 0 ? "No roles exist." : String.join(",", roles); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListSchema.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListSchema.java index cf5fe487cc8..110a6477a62 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListSchema.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListSchema.java @@ -60,7 +60,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = Joiner.on(",").join(schemas); + String all = schemas.length == 0 ? "No schemas exist." 
: Joiner.on(",").join(schemas); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTables.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTables.java index e6afb9b51c0..41a71e87c00 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTables.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTables.java @@ -61,7 +61,10 @@ public void handle() { tableNames.add(tables[i].name()); } - String all = Joiner.on(System.lineSeparator()).join(tableNames); + String all = + tableNames.isEmpty() + ? "No tables exist." + : Joiner.on(System.lineSeparator()).join(tableNames); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTopics.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTopics.java index af4cc217713..a2da6a69ad7 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTopics.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/ListTopics.java @@ -66,7 +66,10 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = Joiner.on(",").join(Arrays.stream(topics).map(topic -> topic.name()).iterator()); + String all = + topics.length == 0 + ? "No topics exist." + : Joiner.on(",").join(Arrays.stream(topics).map(topic -> topic.name()).iterator()); System.out.println(all); } } diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/MetalakeDisable.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/MetalakeDisable.java new file mode 100644 index 00000000000..02e33a45d45 --- /dev/null +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/MetalakeDisable.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.gravitino.cli.commands; + +import org.apache.gravitino.cli.ErrorMessages; +import org.apache.gravitino.client.GravitinoAdminClient; +import org.apache.gravitino.exceptions.NoSuchMetalakeException; + +/** Disable metalake. */ +public class MetalakeDisable extends Command { + private String metalake; + + /** + * Disable metalake + * + * @param url The URL of the Gravitino server. + * @param ignoreVersions If true don't check the client/server versions match. + * @param metalake The name of the metalake. + */ + public MetalakeDisable(String url, boolean ignoreVersions, String metalake) { + super(url, ignoreVersions); + this.metalake = metalake; + } + + /** Disable metalake. 
*/ + @Override + public void handle() { + try { + GravitinoAdminClient client = buildAdminClient(); + client.disableMetalake(metalake); + } catch (NoSuchMetalakeException err) { + exitWithError(ErrorMessages.UNKNOWN_METALAKE); + } catch (Exception exp) { + exitWithError(exp.getMessage()); + } + + System.out.println(metalake + " has been disabled."); + } +} diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/MetalakeEnable.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/MetalakeEnable.java new file mode 100644 index 00000000000..34ba23a61bb --- /dev/null +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/MetalakeEnable.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.gravitino.cli.commands; + +import java.util.Arrays; +import org.apache.gravitino.cli.ErrorMessages; +import org.apache.gravitino.client.GravitinoAdminClient; +import org.apache.gravitino.client.GravitinoMetalake; +import org.apache.gravitino.exceptions.NoSuchMetalakeException; + +/** Enable metalake. 
*/ +public class MetalakeEnable extends Command { + + private final String metalake; + private Boolean enableAllCatalogs; + + /** + * Enable a metalake + * + * @param url The URL of the Gravitino server. + * @param ignoreVersions If true don't check the client/server versions match. + * @param metalake The name of the metalake. + * @param enableAllCatalogs Whether to enable all catalogs. + */ + public MetalakeEnable( + String url, boolean ignoreVersions, String metalake, boolean enableAllCatalogs) { + super(url, ignoreVersions); + this.metalake = metalake; + this.enableAllCatalogs = enableAllCatalogs; + } + + /** Enable metalake. */ + @Override + public void handle() { + StringBuilder msgBuilder = new StringBuilder(metalake); + try { + GravitinoAdminClient client = buildAdminClient(); + client.enableMetalake(metalake); + msgBuilder.append(" has been enabled."); + + if (enableAllCatalogs) { + GravitinoMetalake metalakeObject = client.loadMetalake(metalake); + String[] catalogs = metalakeObject.listCatalogs(); + Arrays.stream(catalogs).forEach(metalakeObject::enableCatalog); + msgBuilder.append(" and all catalogs in this metalake have been enabled."); + } + } catch (NoSuchMetalakeException err) { + exitWithError(ErrorMessages.UNKNOWN_METALAKE); + } catch (Exception exp) { + exitWithError(exp.getMessage()); + } + + System.out.println(msgBuilder); + } +} diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateCatalogName.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateCatalogName.java index 399d600fcef..8d4fcb60b96 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateCatalogName.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateCatalogName.java @@ -39,7 +39,7 @@ public class UpdateCatalogName extends Command { * @param ignoreVersions If true don't check the client/server versions match. * @param metalake The name of the metalake. 
* @param catalog The name of the catalog. - * @param name The new metalake name. + * @param name The new catalog name. */ public UpdateCatalogName( String url, boolean ignoreVersions, String metalake, String catalog, String name) { diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateFilesetName.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateFilesetName.java index 6d4ca8e0f27..a613c1f9d9b 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateFilesetName.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateFilesetName.java @@ -46,7 +46,7 @@ public class UpdateFilesetName extends Command { * @param catalog The name of the catalog. * @param schema The name of the schema. * @param fileset The name of the fileset. - * @param name The new metalake name. + * @param name The new fileset name. */ public UpdateFilesetName( String url, diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTableName.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTableName.java index 773d366fb3b..51a5b68722b 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTableName.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTableName.java @@ -46,7 +46,7 @@ public class UpdateTableName extends Command { * @param catalog The name of the catalog. * @param schema The name of the schema. * @param table The name of the table. - * @param name The new metalake name. + * @param name The new table name. 
*/ public UpdateTableName( String url, diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTagName.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTagName.java index 96fb9d15714..f4ef43412db 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTagName.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UpdateTagName.java @@ -39,7 +39,7 @@ public class UpdateTagName extends Command { * @param ignoreVersions If true don't check the client/server versions match. * @param metalake The name of the tag. * @param tag The name of the catalog. - * @param name The new metalake name. + * @param name The new tag name. */ public UpdateTagName( String url, boolean ignoreVersions, String metalake, String tag, String name) { diff --git a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UserDetails.java b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UserDetails.java index 1d59c83e529..e37f8e6f139 100644 --- a/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UserDetails.java +++ b/clients/cli/src/main/java/org/apache/gravitino/cli/commands/UserDetails.java @@ -60,7 +60,7 @@ public void handle() { exitWithError(exp.getMessage()); } - String all = String.join(",", roles); + String all = roles.isEmpty() ? "The user has no roles." 
: String.join(",", roles); System.out.println(all.toString()); } diff --git a/clients/cli/src/main/resources/catalog_help.txt b/clients/cli/src/main/resources/catalog_help.txt index 27ba7eeac34..c29e9dcadbd 100644 --- a/clients/cli/src/main/resources/catalog_help.txt +++ b/clients/cli/src/main/resources/catalog_help.txt @@ -47,4 +47,10 @@ Set a catalog's property gcli catalog set --name catalog_mysql --property test --value value Remove a catalog's property -gcli catalog remove --name catalog_mysql --property test \ No newline at end of file +gcli catalog remove --name catalog_mysql --property test + +Enable a catalog +gcli catalog update -m metalake_demo --name catalog --enable + +Disable a catalog +gcli catalog update -m metalake_demo --name catalog --disable \ No newline at end of file diff --git a/clients/cli/src/main/resources/metalake_help.txt b/clients/cli/src/main/resources/metalake_help.txt index c80d244f521..f700d3a07ea 100644 --- a/clients/cli/src/main/resources/metalake_help.txt +++ b/clients/cli/src/main/resources/metalake_help.txt @@ -38,3 +38,9 @@ gcli metalake set --property test --value value Remove a metalake's property gcli metalake remove --property test + +Enable a metalake +gcli metalake update -m metalake_demo --enable + +Disable a metalake +gcli metalake update -m metalake_demo --disable \ No newline at end of file diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestCatalogCommands.java b/clients/cli/src/test/java/org/apache/gravitino/cli/TestCatalogCommands.java index eb8bc46d38e..d751d671731 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestCatalogCommands.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestCatalogCommands.java @@ -19,17 +19,24 @@ package org.apache.gravitino.cli; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; 
import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; import java.util.HashMap; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.gravitino.cli.commands.CatalogAudit; import org.apache.gravitino.cli.commands.CatalogDetails; +import org.apache.gravitino.cli.commands.CatalogDisable; +import org.apache.gravitino.cli.commands.CatalogEnable; import org.apache.gravitino.cli.commands.CreateCatalog; import org.apache.gravitino.cli.commands.DeleteCatalog; import org.apache.gravitino.cli.commands.ListCatalogProperties; @@ -38,6 +45,7 @@ import org.apache.gravitino.cli.commands.SetCatalogProperty; import org.apache.gravitino.cli.commands.UpdateCatalogComment; import org.apache.gravitino.cli.commands.UpdateCatalogName; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -45,10 +53,28 @@ class TestCatalogCommands { private CommandLine mockCommandLine; private Options mockOptions; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream originalErr = System.err; + @BeforeEach void setUp() { mockCommandLine = mock(CommandLine.class); mockOptions = mock(Options.class); + System.setOut(new PrintStream(outContent)); + System.setErr(new PrintStream(errContent)); + } + + @AfterEach + void restoreExitFlg() { + Main.useExit = true; + } + + @AfterEach + public void restoreStreams() { + System.setOut(originalOut); + System.setErr(originalErr); } @Test @@ -291,4 +317,92 @@ void testUpdateCatalogNameCommand() { commandLine.handleCommandLine(); verify(mockUpdateName).handle(); } + + @Test + void testEnableCatalogCommand() { + CatalogEnable mockEnable = 
mock(CatalogEnable.class); + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + when(mockCommandLine.hasOption(GravitinoOptions.ENABLE)).thenReturn(true); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.CATALOG, CommandActions.UPDATE)); + doReturn(mockEnable) + .when(commandLine) + .newCatalogEnable( + GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog", false); + commandLine.handleCommandLine(); + verify(mockEnable).handle(); + } + + @Test + void testEnableCatalogCommandWithRecursive() { + CatalogEnable mockEnable = mock(CatalogEnable.class); + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + when(mockCommandLine.hasOption(GravitinoOptions.ALL)).thenReturn(true); + when(mockCommandLine.hasOption(GravitinoOptions.ENABLE)).thenReturn(true); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.CATALOG, CommandActions.UPDATE)); + doReturn(mockEnable) + .when(commandLine) + .newCatalogEnable( + GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog", true); + commandLine.handleCommandLine(); + verify(mockEnable).handle(); + } + + @Test + void testDisableCatalogCommand() { + CatalogDisable mockDisable = mock(CatalogDisable.class); + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + 
when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + when(mockCommandLine.hasOption(GravitinoOptions.DISABLE)).thenReturn(true); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.CATALOG, CommandActions.UPDATE)); + doReturn(mockDisable) + .when(commandLine) + .newCatalogDisable(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog"); + commandLine.handleCommandLine(); + verify(mockDisable).handle(); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testCatalogWithDisableAndEnableOptions() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + when(mockCommandLine.hasOption(GravitinoOptions.DISABLE)).thenReturn(true); + when(mockCommandLine.hasOption(GravitinoOptions.ENABLE)).thenReturn(true); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.CATALOG, CommandActions.UPDATE)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newCatalogEnable( + GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog", false); + verify(commandLine, never()) + .newCatalogDisable(GravitinoCommandLine.DEFAULT_URL, false, "melake_demo", "catalog"); + assertTrue(errContent.toString().contains("Unable to enable and disable at the same time")); + } } diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestColumnCommands.java 
b/clients/cli/src/test/java/org/apache/gravitino/cli/TestColumnCommands.java index e26759e2d4c..2eb4c536480 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestColumnCommands.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestColumnCommands.java @@ -19,12 +19,18 @@ package org.apache.gravitino.cli; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.gravitino.cli.commands.AddColumn; @@ -38,17 +44,30 @@ import org.apache.gravitino.cli.commands.UpdateColumnName; import org.apache.gravitino.cli.commands.UpdateColumnNullability; import org.apache.gravitino.cli.commands.UpdateColumnPosition; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class TestColumnCommands { private CommandLine mockCommandLine; private Options mockOptions; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream originalErr = System.err; @BeforeEach void setUp() { mockCommandLine = mock(CommandLine.class); mockOptions = mock(Options.class); + System.setOut(new PrintStream(outContent)); + System.setErr(new PrintStream(errContent)); + } + + @AfterEach + public void restoreStreams() { + System.setOut(originalOut); + System.setErr(originalErr); } @Test @@ -98,6 +117,34 @@ void testColumnAuditCommand() { verify(mockAudit).handle(); } 
+ @Test + void testColumnDetailsCommand() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)) + .thenReturn("catalog.schema.users.name"); + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.COLUMN, CommandActions.DETAILS)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newColumnAudit( + GravitinoCommandLine.DEFAULT_URL, + false, + "metalake_demo", + "catalog", + "schema", + "users", + "name"); + + String output = new String(errContent.toByteArray(), StandardCharsets.UTF_8).trim(); + assertEquals(output, ErrorMessages.UNSUPPORTED_ACTION); + } + @Test void testAddColumn() { AddColumn mockAddColumn = mock(AddColumn.class); diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestFulllName.java b/clients/cli/src/test/java/org/apache/gravitino/cli/TestFulllName.java index ecde923a36a..4b5e1fed79b 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestFulllName.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestFulllName.java @@ -25,20 +25,37 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.MissingArgumentException; import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class 
TestFulllName { private Options options; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream originalErr = System.err; @BeforeEach public void setUp() { options = new GravitinoOptions().options(); + System.setOut(new PrintStream(outContent)); + System.setErr(new PrintStream(errContent)); + } + + @AfterEach + public void restoreStreams() { + System.setOut(originalOut); + System.setErr(originalErr); } @Test @@ -152,4 +169,32 @@ public void hasPartNameColumn() throws Exception { assertTrue(fullName.hasTableName()); assertTrue(fullName.hasColumnName()); } + + @Test + @SuppressWarnings("DefaultCharset") + public void testMissingName() throws ParseException { + String[] args = {"column", "list", "-m", "demo_metalake", "-i"}; + CommandLine commandLine = new DefaultParser().parse(options, args); + FullName fullName = new FullName(commandLine); + fullName.getCatalogName(); + fullName.getSchemaName(); + fullName.getTableName(); + fullName.getColumnName(); + String output = new String(errContent.toByteArray(), StandardCharsets.UTF_8).trim(); + assertEquals(output, ErrorMessages.MISSING_NAME); + } + + @Test + @SuppressWarnings("DefaultCharset") + public void testMalformedName() throws ParseException { + String[] args = {"column", "list", "-m", "demo_metalake", "-i", "--name", "Hive_catalog"}; + CommandLine commandLine = new DefaultParser().parse(options, args); + FullName fullName = new FullName(commandLine); + fullName.getCatalogName(); + fullName.getSchemaName(); + fullName.getTableName(); + fullName.getColumnName(); + String output = new String(errContent.toByteArray(), StandardCharsets.UTF_8).trim(); + assertEquals(output, ErrorMessages.MALFORMED_NAME); + } } diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestMain.java 
b/clients/cli/src/test/java/org/apache/gravitino/cli/TestMain.java index 93de0a6bc9d..377e569aa53 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestMain.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestMain.java @@ -150,6 +150,7 @@ public void catalogWithOneArg() throws ParseException { assertEquals(CommandEntities.CATALOG, entity); } + @Test public void metalakeWithHelpOption() throws ParseException { Options options = new GravitinoOptions().options(); CommandLineParser parser = new DefaultParser(); diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestMetalakeCommands.java b/clients/cli/src/test/java/org/apache/gravitino/cli/TestMetalakeCommands.java index b7468b635a4..01eebb6dab5 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestMetalakeCommands.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestMetalakeCommands.java @@ -19,12 +19,16 @@ package org.apache.gravitino.cli; +import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.gravitino.cli.commands.CreateMetalake; @@ -33,21 +37,42 @@ import org.apache.gravitino.cli.commands.ListMetalakes; import org.apache.gravitino.cli.commands.MetalakeAudit; import org.apache.gravitino.cli.commands.MetalakeDetails; +import org.apache.gravitino.cli.commands.MetalakeDisable; +import org.apache.gravitino.cli.commands.MetalakeEnable; import org.apache.gravitino.cli.commands.RemoveMetalakeProperty; import org.apache.gravitino.cli.commands.SetMetalakeProperty; import org.apache.gravitino.cli.commands.UpdateMetalakeComment; import 
org.apache.gravitino.cli.commands.UpdateMetalakeName; +import org.junit.Assert; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class TestMetalakeCommands { private CommandLine mockCommandLine; private Options mockOptions; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream originalErr = System.err; @BeforeEach void setUp() { mockCommandLine = mock(CommandLine.class); mockOptions = mock(Options.class); + System.setOut(new PrintStream(outContent)); + System.setErr(new PrintStream(errContent)); + } + + @AfterEach + void restoreExitFlg() { + Main.useExit = true; + } + + @AfterEach + public void restoreStreams() { + System.setOut(originalOut); + System.setErr(originalErr); } @Test @@ -280,4 +305,80 @@ void testUpdateMetalakeNameForceCommand() { commandLine.handleCommandLine(); verify(mockUpdateName).handle(); } + + @Test + void testEnableMetalakeCommand() { + MetalakeEnable mockEnable = mock(MetalakeEnable.class); + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.ENABLE)).thenReturn(true); + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.METALAKE, CommandActions.UPDATE)); + doReturn(mockEnable) + .when(commandLine) + .newMetalakeEnable(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", false); + commandLine.handleCommandLine(); + verify(mockEnable).handle(); + } + + @Test + void testEnableMetalakeCommandWithRecursive() { + MetalakeEnable mockEnable = mock(MetalakeEnable.class); + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + 
when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.ALL)).thenReturn(true); + when(mockCommandLine.hasOption(GravitinoOptions.ENABLE)).thenReturn(true); + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.METALAKE, CommandActions.UPDATE)); + doReturn(mockEnable) + .when(commandLine) + .newMetalakeEnable(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", true); + commandLine.handleCommandLine(); + verify(mockEnable).handle(); + } + + @Test + void testDisableMetalakeCommand() { + MetalakeDisable mockDisable = mock(MetalakeDisable.class); + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.DISABLE)).thenReturn(true); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.METALAKE, CommandActions.UPDATE)); + doReturn(mockDisable) + .when(commandLine) + .newMetalakeDisable(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo"); + + commandLine.handleCommandLine(); + verify(mockDisable).handle(); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testMetalakeWithDisableAndEnableOptions() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(CommandEntities.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.ENABLE)).thenReturn(true); + when(mockCommandLine.hasOption(GravitinoOptions.DISABLE)).thenReturn(true); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.METALAKE, CommandActions.UPDATE)); + + Assert.assertThrows(RuntimeException.class, 
commandLine::handleCommandLine); + verify(commandLine, never()) + .newMetalakeEnable(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", false); + verify(commandLine, never()) + .newMetalakeEnable(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", false); + assertTrue(errContent.toString().contains("Unable to enable and disable at the same time")); + } } diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestSchemaCommands.java b/clients/cli/src/test/java/org/apache/gravitino/cli/TestSchemaCommands.java index 89cc72bcdbf..190e866355b 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestSchemaCommands.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestSchemaCommands.java @@ -19,12 +19,17 @@ package org.apache.gravitino.cli; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.gravitino.cli.commands.CreateSchema; @@ -35,6 +40,7 @@ import org.apache.gravitino.cli.commands.SchemaAudit; import org.apache.gravitino.cli.commands.SchemaDetails; import org.apache.gravitino.cli.commands.SetSchemaProperty; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -42,10 +48,28 @@ class TestSchemaCommands { private CommandLine mockCommandLine; private Options mockOptions; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream 
originalErr = System.err; + @BeforeEach void setUp() { mockCommandLine = mock(CommandLine.class); mockOptions = mock(Options.class); + System.setOut(new PrintStream(outContent)); + System.setErr(new PrintStream(errContent)); + } + + @AfterEach + void restoreExitFlg() { + Main.useExit = true; + } + + @AfterEach + public void restoreStreams() { + System.setOut(originalOut); + System.setErr(originalErr); } @Test @@ -245,4 +269,67 @@ void testListSchemaPropertiesCommand() { commandLine.handleCommandLine(); verify(mockListProperties).handle(); } + + @Test + @SuppressWarnings("DefaultCharset") + void testListSchemaWithoutCatalog() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(false); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.SCHEMA, CommandActions.LIST)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newListSchema(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", null); + assertTrue( + errContent.toString().contains("Missing required argument(s): " + CommandEntities.CATALOG)); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testDetailsSchemaWithoutCatalog() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(false); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.SCHEMA, CommandActions.DETAILS)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + assertTrue( + errContent + .toString() + 
.contains( + "Missing required argument(s): " + + CommandEntities.CATALOG + + ", " + + CommandEntities.SCHEMA)); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testDetailsSchemaWithoutSchema() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.SCHEMA, CommandActions.DETAILS)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + assertTrue( + errContent.toString().contains("Missing required argument(s): " + CommandEntities.SCHEMA)); + } } diff --git a/clients/cli/src/test/java/org/apache/gravitino/cli/TestTableCommands.java b/clients/cli/src/test/java/org/apache/gravitino/cli/TestTableCommands.java index 07cbdbdcc6c..32c289cfd85 100644 --- a/clients/cli/src/test/java/org/apache/gravitino/cli/TestTableCommands.java +++ b/clients/cli/src/test/java/org/apache/gravitino/cli/TestTableCommands.java @@ -19,12 +19,17 @@ package org.apache.gravitino.cli; +import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; import org.apache.gravitino.cli.commands.CreateTable; @@ -41,6 +46,7 @@ import org.apache.gravitino.cli.commands.TableSortOrder; import 
org.apache.gravitino.cli.commands.UpdateTableComment; import org.apache.gravitino.cli.commands.UpdateTableName; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -48,10 +54,28 @@ class TestTableCommands { private CommandLine mockCommandLine; private Options mockOptions; + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + private final PrintStream originalErr = System.err; + @BeforeEach void setUp() { mockCommandLine = mock(CommandLine.class); mockOptions = mock(Options.class); + System.setOut(new PrintStream(outContent)); + System.setErr(new PrintStream(errContent)); + } + + @AfterEach + void restoreExitFlg() { + Main.useExit = true; + } + + @AfterEach + public void restoreStreams() { + System.setOut(originalOut); + System.setErr(originalErr); } @Test @@ -410,4 +434,127 @@ void testCreateTable() { commandLine.handleCommandLine(); verify(mockCreate).handle(); } + + @Test + @SuppressWarnings("DefaultCharset") + void testListTableWithoutCatalog() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(CommandEntities.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(false); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.TABLE, CommandActions.LIST)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newListTables(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", null, null); + assertTrue( + errContent + .toString() + .contains( + "Missing required argument(s): " + + CommandEntities.CATALOG + + ", " + + CommandEntities.SCHEMA)); + } + + @Test + 
@SuppressWarnings("DefaultCharset") + void testListTableWithoutSchema() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(CommandEntities.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.TABLE, CommandActions.LIST)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newListTables(GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog", null); + assertTrue( + errContent.toString().contains("Missing required argument(s): " + CommandEntities.SCHEMA)); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testDetailTableWithoutCatalog() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(CommandEntities.METALAKE)).thenReturn("metalake_demo"); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.TABLE, CommandActions.DETAILS)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newTableDetails( + GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", null, null, null); + assertTrue( + errContent + .toString() + .contains( + "Missing required argument(s): " + + CommandEntities.CATALOG + + ", " + + CommandEntities.SCHEMA + + ", " + + CommandEntities.TABLE)); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testDetailTableWithoutSchema() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + 
when(mockCommandLine.getOptionValue(CommandEntities.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog"); + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.TABLE, CommandActions.DETAILS)); + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newTableDetails( + GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog", null, null); + assertTrue( + errContent + .toString() + .contains( + "Missing required argument(s): " + + CommandEntities.SCHEMA + + ", " + + CommandEntities.TABLE)); + } + + @Test + @SuppressWarnings("DefaultCharset") + void testDetailTableWithoutTable() { + Main.useExit = false; + when(mockCommandLine.hasOption(GravitinoOptions.METALAKE)).thenReturn(true); + when(mockCommandLine.getOptionValue(CommandEntities.METALAKE)).thenReturn("metalake_demo"); + when(mockCommandLine.hasOption(GravitinoOptions.NAME)).thenReturn(true); + when(mockCommandLine.getOptionValue(GravitinoOptions.NAME)).thenReturn("catalog.schema"); + + GravitinoCommandLine commandLine = + spy( + new GravitinoCommandLine( + mockCommandLine, mockOptions, CommandEntities.TABLE, CommandActions.DETAILS)); + + assertThrows(RuntimeException.class, commandLine::handleCommandLine); + verify(commandLine, never()) + .newTableDetails( + GravitinoCommandLine.DEFAULT_URL, false, "metalake_demo", "catalog", "schema", null); + assertTrue( + errContent.toString().contains("Missing required argument(s): " + CommandEntities.TABLE)); + } } diff --git a/clients/client-python/gravitino/api/credential/adls_token_credential.py b/clients/client-python/gravitino/api/credential/adls_token_credential.py new file mode 100644 index 00000000000..40ad0eebbd9 --- /dev/null +++ 
b/clients/client-python/gravitino/api/credential/adls_token_credential.py @@ -0,0 +1,90 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from abc import ABC +from typing import Dict + +from gravitino.api.credential.credential import Credential +from gravitino.utils.precondition import Precondition + + +class ADLSTokenCredential(Credential, ABC): + """Represents ADLS token credential.""" + + ADLS_SAS_TOKEN_CREDENTIAL_TYPE: str = "adls-sas-token" + ADLS_DOMAIN: str = "dfs.core.windows.net" + _STORAGE_ACCOUNT_NAME: str = "azure-storage-account-name" + _SAS_TOKEN: str = "adls-sas-token" + + def __init__(self, credential_info: Dict[str, str], expire_time_in_ms: int): + self._account_name = credential_info.get(self._STORAGE_ACCOUNT_NAME, None) + self._sas_token = credential_info.get(self._SAS_TOKEN, None) + self._expire_time_in_ms = expire_time_in_ms + Precondition.check_string_not_empty( + self._account_name, "The ADLS account name should not be empty." + ) + Precondition.check_string_not_empty( + self._sas_token, "The ADLS SAS token should not be empty." 
+ ) + Precondition.check_argument( + self._expire_time_in_ms > 0, + "The expiration time of ADLS token credential should be greater than 0", + ) + + def credential_type(self) -> str: + """The type of the credential. + + Returns: + the type of the credential. + """ + return self.ADLS_SAS_TOKEN_CREDENTIAL_TYPE + + def expire_time_in_ms(self) -> int: + """Returns the expiration time of the credential in milliseconds since + the epoch, 0 means it will never expire. + + Returns: + The expiration time of the credential. + """ + return self._expire_time_in_ms + + def credential_info(self) -> Dict[str, str]: + """The credential information. + + Returns: + The credential information. + """ + return { + self._STORAGE_ACCOUNT_NAME: self._account_name, + self._SAS_TOKEN: self._sas_token, + } + + def account_name(self) -> str: + """The ADLS account name. + + Returns: + The ADLS account name. + """ + return self._account_name + + def sas_token(self) -> str: + """The ADLS sas token. + + Returns: + The ADLS sas token. + """ + return self._sas_token diff --git a/clients/client-python/gravitino/api/credential/azure_account_key_credential.py b/clients/client-python/gravitino/api/credential/azure_account_key_credential.py new file mode 100644 index 00000000000..aa60e301548 --- /dev/null +++ b/clients/client-python/gravitino/api/credential/azure_account_key_credential.py @@ -0,0 +1,88 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from abc import ABC +from typing import Dict + +from gravitino.api.credential.credential import Credential +from gravitino.utils.precondition import Precondition + + +class AzureAccountKeyCredential(Credential, ABC): + """Represents Azure account key credential.""" + + AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE: str = "azure-account-key" + _STORAGE_ACCOUNT_NAME: str = "azure-storage-account-name" + _STORAGE_ACCOUNT_KEY: str = "azure-storage-account-key" + + def __init__(self, credential_info: Dict[str, str], expire_time_in_ms: int): + self._account_name = credential_info.get(self._STORAGE_ACCOUNT_NAME, None) + self._account_key = credential_info.get(self._STORAGE_ACCOUNT_KEY, None) + Precondition.check_string_not_empty( + self._account_name, "The Azure account name should not be empty" + ) + Precondition.check_string_not_empty( + self._account_key, "The Azure account key should not be empty" + ) + Precondition.check_argument( + expire_time_in_ms == 0, + "The expiration time of Azure account key credential should be 0", + ) + + def credential_type(self) -> str: + """Returns the type of the credential. + + Returns: + The type of the credential. + """ + return self.AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE + + def expire_time_in_ms(self) -> int: + """Returns the expiration time of the credential in milliseconds since + the epoch, 0 means it will never expire. + + Returns: + The expiration time of the credential. + """ + return 0 + + def credential_info(self) -> Dict[str, str]: + """The credential information. + + Returns: + The credential information. 
+ """ + return { + self._STORAGE_ACCOUNT_NAME: self._account_name, + self._STORAGE_ACCOUNT_KEY: self._account_key, + } + + def account_name(self) -> str: + """The Azure account name. + + Returns: + The Azure account name. + """ + return self._account_name + + def account_key(self) -> str: + """The Azure account key. + + Returns: + The Azure account key. + """ + return self._account_key diff --git a/clients/client-python/gravitino/api/credential/gcs_token_credential.py b/clients/client-python/gravitino/api/credential/gcs_token_credential.py index 1362383f0bb..0221ac07ca9 100644 --- a/clients/client-python/gravitino/api/credential/gcs_token_credential.py +++ b/clients/client-python/gravitino/api/credential/gcs_token_credential.py @@ -31,7 +31,7 @@ class GCSTokenCredential(Credential, ABC): _expire_time_in_ms: int = 0 def __init__(self, credential_info: Dict[str, str], expire_time_in_ms: int): - self._token = credential_info[self._GCS_TOKEN_NAME] + self._token = credential_info.get(self._GCS_TOKEN_NAME, None) self._expire_time_in_ms = expire_time_in_ms Precondition.check_string_not_empty( self._token, "GCS token should not be empty" diff --git a/clients/client-python/gravitino/api/credential/oss_secret_key_credential.py b/clients/client-python/gravitino/api/credential/oss_secret_key_credential.py index 919a3782ef9..69a9646490e 100644 --- a/clients/client-python/gravitino/api/credential/oss_secret_key_credential.py +++ b/clients/client-python/gravitino/api/credential/oss_secret_key_credential.py @@ -26,14 +26,14 @@ class OSSSecretKeyCredential(Credential, ABC): """Represents OSS secret key credential.""" OSS_SECRET_KEY_CREDENTIAL_TYPE: str = "oss-secret-key" - _GRAVITINO_OSS_STATIC_ACCESS_KEY_ID: str = "oss-access-key-id" - _GRAVITINO_OSS_STATIC_SECRET_ACCESS_KEY: str = "oss-secret-access-key" + _STATIC_ACCESS_KEY_ID: str = "oss-access-key-id" + _STATIC_SECRET_ACCESS_KEY: str = "oss-secret-access-key" def __init__(self, credential_info: Dict[str, str], 
expire_time_in_ms: int): - self._access_key_id = credential_info[self._GRAVITINO_OSS_STATIC_ACCESS_KEY_ID] - self._secret_access_key = credential_info[ - self._GRAVITINO_OSS_STATIC_SECRET_ACCESS_KEY - ] + self._access_key_id = credential_info.get(self._STATIC_ACCESS_KEY_ID, None) + self._secret_access_key = credential_info.get( + self._STATIC_SECRET_ACCESS_KEY, None + ) Precondition.check_string_not_empty( self._access_key_id, "The OSS access key ID should not be empty" ) @@ -69,8 +69,8 @@ def credential_info(self) -> Dict[str, str]: The credential information. """ return { - self._GRAVITINO_OSS_STATIC_SECRET_ACCESS_KEY: self._secret_access_key, - self._GRAVITINO_OSS_STATIC_ACCESS_KEY_ID: self._access_key_id, + self._STATIC_ACCESS_KEY_ID: self._access_key_id, + self._STATIC_SECRET_ACCESS_KEY: self._secret_access_key, } def access_key_id(self) -> str: diff --git a/clients/client-python/gravitino/api/credential/oss_token_credential.py b/clients/client-python/gravitino/api/credential/oss_token_credential.py index 70dad14a1aa..d217ad8c896 100644 --- a/clients/client-python/gravitino/api/credential/oss_token_credential.py +++ b/clients/client-python/gravitino/api/credential/oss_token_credential.py @@ -26,16 +26,16 @@ class OSSTokenCredential(Credential, ABC): """Represents OSS token credential.""" OSS_TOKEN_CREDENTIAL_TYPE: str = "oss-token" - _GRAVITINO_OSS_SESSION_ACCESS_KEY_ID: str = "oss-access-key-id" - _GRAVITINO_OSS_SESSION_SECRET_ACCESS_KEY: str = "oss-secret-access-key" - _GRAVITINO_OSS_TOKEN: str = "oss-security-token" + _STATIC_ACCESS_KEY_ID: str = "oss-access-key-id" + _STATIC_SECRET_ACCESS_KEY: str = "oss-secret-access-key" + _OSS_TOKEN: str = "oss-security-token" def __init__(self, credential_info: Dict[str, str], expire_time_in_ms: int): - self._access_key_id = credential_info[self._GRAVITINO_OSS_SESSION_ACCESS_KEY_ID] - self._secret_access_key = credential_info[ - self._GRAVITINO_OSS_SESSION_SECRET_ACCESS_KEY - ] - self._security_token = 
credential_info[self._GRAVITINO_OSS_TOKEN] + self._access_key_id = credential_info.get(self._STATIC_ACCESS_KEY_ID, None) + self._secret_access_key = credential_info.get( + self._STATIC_SECRET_ACCESS_KEY, None + ) + self._security_token = credential_info.get(self._OSS_TOKEN, None) self._expire_time_in_ms = expire_time_in_ms Precondition.check_string_not_empty( self._access_key_id, "The OSS access key ID should not be empty" @@ -75,9 +75,9 @@ def credential_info(self) -> Dict[str, str]: The credential information. """ return { - self._GRAVITINO_OSS_TOKEN: self._security_token, - self._GRAVITINO_OSS_SESSION_ACCESS_KEY_ID: self._access_key_id, - self._GRAVITINO_OSS_SESSION_SECRET_ACCESS_KEY: self._secret_access_key, + self._STATIC_ACCESS_KEY_ID: self._access_key_id, + self._STATIC_SECRET_ACCESS_KEY: self._secret_access_key, + self._OSS_TOKEN: self._security_token, } def access_key_id(self) -> str: diff --git a/clients/client-python/gravitino/api/credential/s3_secret_key_credential.py b/clients/client-python/gravitino/api/credential/s3_secret_key_credential.py index 735c41e2ee0..05c221fe2a8 100644 --- a/clients/client-python/gravitino/api/credential/s3_secret_key_credential.py +++ b/clients/client-python/gravitino/api/credential/s3_secret_key_credential.py @@ -26,14 +26,14 @@ class S3SecretKeyCredential(Credential, ABC): """Represents S3 secret key credential.""" S3_SECRET_KEY_CREDENTIAL_TYPE: str = "s3-secret-key" - _GRAVITINO_S3_STATIC_ACCESS_KEY_ID: str = "s3-access-key-id" - _GRAVITINO_S3_STATIC_SECRET_ACCESS_KEY: str = "s3-secret-access-key" + _STATIC_ACCESS_KEY_ID: str = "s3-access-key-id" + _STATIC_SECRET_ACCESS_KEY: str = "s3-secret-access-key" def __init__(self, credential_info: Dict[str, str], expire_time: int): - self._access_key_id = credential_info[self._GRAVITINO_S3_STATIC_ACCESS_KEY_ID] - self._secret_access_key = credential_info[ - self._GRAVITINO_S3_STATIC_SECRET_ACCESS_KEY - ] + self._access_key_id = credential_info.get(self._STATIC_ACCESS_KEY_ID, 
None) + self._secret_access_key = credential_info.get( + self._STATIC_SECRET_ACCESS_KEY, None + ) Precondition.check_string_not_empty( self._access_key_id, "S3 access key id should not be empty" ) @@ -70,8 +70,8 @@ def credential_info(self) -> Dict[str, str]: The credential information. """ return { - self._GRAVITINO_S3_STATIC_SECRET_ACCESS_KEY: self._secret_access_key, - self._GRAVITINO_S3_STATIC_ACCESS_KEY_ID: self._access_key_id, + self._STATIC_ACCESS_KEY_ID: self._access_key_id, + self._STATIC_SECRET_ACCESS_KEY: self._secret_access_key, } def access_key_id(self) -> str: diff --git a/clients/client-python/gravitino/api/credential/s3_token_credential.py b/clients/client-python/gravitino/api/credential/s3_token_credential.py index c72d9f02a7d..d95919f6628 100644 --- a/clients/client-python/gravitino/api/credential/s3_token_credential.py +++ b/clients/client-python/gravitino/api/credential/s3_token_credential.py @@ -26,9 +26,9 @@ class S3TokenCredential(Credential, ABC): """Represents the S3 token credential.""" S3_TOKEN_CREDENTIAL_TYPE: str = "s3-token" - _GRAVITINO_S3_SESSION_ACCESS_KEY_ID: str = "s3-access-key-id" - _GRAVITINO_S3_SESSION_SECRET_ACCESS_KEY: str = "s3-secret-access-key" - _GRAVITINO_S3_TOKEN: str = "s3-session-token" + _SESSION_ACCESS_KEY_ID: str = "s3-access-key-id" + _SESSION_SECRET_ACCESS_KEY: str = "s3-secret-access-key" + _SESSION_TOKEN: str = "s3-session-token" _expire_time_in_ms: int = 0 _access_key_id: str = None @@ -36,11 +36,11 @@ class S3TokenCredential(Credential, ABC): _session_token: str = None def __init__(self, credential_info: Dict[str, str], expire_time_in_ms: int): - self._access_key_id = credential_info[self._GRAVITINO_S3_SESSION_ACCESS_KEY_ID] - self._secret_access_key = credential_info[ - self._GRAVITINO_S3_SESSION_SECRET_ACCESS_KEY - ] - self._session_token = credential_info[self._GRAVITINO_S3_TOKEN] + self._access_key_id = credential_info.get(self._SESSION_ACCESS_KEY_ID, None) + self._secret_access_key = 
credential_info.get( + self._SESSION_SECRET_ACCESS_KEY, None + ) + self._session_token = credential_info.get(self._SESSION_TOKEN, None) self._expire_time_in_ms = expire_time_in_ms Precondition.check_string_not_empty( self._access_key_id, "The S3 access key ID should not be empty" @@ -80,9 +80,9 @@ def credential_info(self) -> Dict[str, str]: The credential information. """ return { - self._GRAVITINO_S3_TOKEN: self._session_token, - self._GRAVITINO_S3_SESSION_ACCESS_KEY_ID: self._access_key_id, - self._GRAVITINO_S3_SESSION_SECRET_ACCESS_KEY: self._secret_access_key, + self._SESSION_ACCESS_KEY_ID: self._access_key_id, + self._SESSION_SECRET_ACCESS_KEY: self._secret_access_key, + self._SESSION_TOKEN: self._session_token, } def access_key_id(self) -> str: diff --git a/clients/client-python/gravitino/utils/credential_factory.py b/clients/client-python/gravitino/utils/credential_factory.py index 7a584caa3e6..32d7465b806 100644 --- a/clients/client-python/gravitino/utils/credential_factory.py +++ b/clients/client-python/gravitino/utils/credential_factory.py @@ -16,12 +16,17 @@ # under the License. 
from typing import Dict + from gravitino.api.credential.credential import Credential from gravitino.api.credential.gcs_token_credential import GCSTokenCredential from gravitino.api.credential.oss_token_credential import OSSTokenCredential from gravitino.api.credential.s3_secret_key_credential import S3SecretKeyCredential from gravitino.api.credential.s3_token_credential import S3TokenCredential from gravitino.api.credential.oss_secret_key_credential import OSSSecretKeyCredential +from gravitino.api.credential.adls_token_credential import ADLSTokenCredential +from gravitino.api.credential.azure_account_key_credential import ( + AzureAccountKeyCredential, +) class CredentialFactory: @@ -29,14 +34,28 @@ class CredentialFactory: def create( credential_type: str, credential_info: Dict[str, str], expire_time_in_ms: int ) -> Credential: + credential = None + if credential_type == S3TokenCredential.S3_TOKEN_CREDENTIAL_TYPE: - return S3TokenCredential(credential_info, expire_time_in_ms) - if credential_type == S3SecretKeyCredential.S3_SECRET_KEY_CREDENTIAL_TYPE: - return S3SecretKeyCredential(credential_info, expire_time_in_ms) - if credential_type == GCSTokenCredential.GCS_TOKEN_CREDENTIAL_TYPE: - return GCSTokenCredential(credential_info, expire_time_in_ms) - if credential_type == OSSTokenCredential.OSS_TOKEN_CREDENTIAL_TYPE: - return OSSTokenCredential(credential_info, expire_time_in_ms) - if credential_type == OSSSecretKeyCredential.OSS_SECRET_KEY_CREDENTIAL_TYPE: - return OSSSecretKeyCredential(credential_info, expire_time_in_ms) - raise NotImplementedError(f"Credential type {credential_type} is not supported") + credential = S3TokenCredential(credential_info, expire_time_in_ms) + elif credential_type == S3SecretKeyCredential.S3_SECRET_KEY_CREDENTIAL_TYPE: + credential = S3SecretKeyCredential(credential_info, expire_time_in_ms) + elif credential_type == GCSTokenCredential.GCS_TOKEN_CREDENTIAL_TYPE: + credential = GCSTokenCredential(credential_info, expire_time_in_ms) + 
elif credential_type == OSSTokenCredential.OSS_TOKEN_CREDENTIAL_TYPE: + credential = OSSTokenCredential(credential_info, expire_time_in_ms) + elif credential_type == OSSSecretKeyCredential.OSS_SECRET_KEY_CREDENTIAL_TYPE: + credential = OSSSecretKeyCredential(credential_info, expire_time_in_ms) + elif credential_type == ADLSTokenCredential.ADLS_SAS_TOKEN_CREDENTIAL_TYPE: + credential = ADLSTokenCredential(credential_info, expire_time_in_ms) + elif ( + credential_type + == AzureAccountKeyCredential.AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE + ): + credential = AzureAccountKeyCredential(credential_info, expire_time_in_ms) + else: + raise NotImplementedError( + f"Credential type {credential_type} is not supported" + ) + + return credential diff --git a/clients/client-python/tests/unittests/test_credential_factory.py b/clients/client-python/tests/unittests/test_credential_factory.py index 94fd02d1df2..4c4a91495a1 100644 --- a/clients/client-python/tests/unittests/test_credential_factory.py +++ b/clients/client-python/tests/unittests/test_credential_factory.py @@ -25,15 +25,19 @@ from gravitino.api.credential.s3_token_credential import S3TokenCredential from gravitino.utils.credential_factory import CredentialFactory from gravitino.api.credential.oss_secret_key_credential import OSSSecretKeyCredential +from gravitino.api.credential.adls_token_credential import ADLSTokenCredential +from gravitino.api.credential.azure_account_key_credential import ( + AzureAccountKeyCredential, +) class TestCredentialFactory(unittest.TestCase): def test_s3_token_credential(self): s3_credential_info = { - S3TokenCredential._GRAVITINO_S3_SESSION_ACCESS_KEY_ID: "access_key", - S3TokenCredential._GRAVITINO_S3_SESSION_SECRET_ACCESS_KEY: "secret_key", - S3TokenCredential._GRAVITINO_S3_TOKEN: "session_token", + S3TokenCredential._SESSION_ACCESS_KEY_ID: "access_key", + S3TokenCredential._SESSION_SECRET_ACCESS_KEY: "secret_key", + S3TokenCredential._SESSION_TOKEN: "session_token", } s3_credential = 
S3TokenCredential(s3_credential_info, 1000) credential_info = s3_credential.credential_info() @@ -42,6 +46,12 @@ def test_s3_token_credential(self): check_credential = CredentialFactory.create( s3_credential.S3_TOKEN_CREDENTIAL_TYPE, credential_info, expire_time ) + self.assertEqual( + S3TokenCredential.S3_TOKEN_CREDENTIAL_TYPE, + check_credential.credential_type(), + ) + + self.assertIsInstance(check_credential, S3TokenCredential) self.assertEqual("access_key", check_credential.access_key_id()) self.assertEqual("secret_key", check_credential.secret_access_key()) self.assertEqual("session_token", check_credential.session_token()) @@ -49,8 +59,8 @@ def test_s3_token_credential(self): def test_s3_secret_key_credential(self): s3_credential_info = { - S3SecretKeyCredential._GRAVITINO_S3_STATIC_ACCESS_KEY_ID: "access_key", - S3SecretKeyCredential._GRAVITINO_S3_STATIC_SECRET_ACCESS_KEY: "secret_key", + S3SecretKeyCredential._STATIC_ACCESS_KEY_ID: "access_key", + S3SecretKeyCredential._STATIC_SECRET_ACCESS_KEY: "secret_key", } s3_credential = S3SecretKeyCredential(s3_credential_info, 0) credential_info = s3_credential.credential_info() @@ -59,43 +69,53 @@ def test_s3_secret_key_credential(self): check_credential = CredentialFactory.create( s3_credential.S3_SECRET_KEY_CREDENTIAL_TYPE, credential_info, expire_time ) + self.assertEqual( + S3SecretKeyCredential.S3_SECRET_KEY_CREDENTIAL_TYPE, + check_credential.credential_type(), + ) + + self.assertIsInstance(check_credential, S3SecretKeyCredential) self.assertEqual("access_key", check_credential.access_key_id()) self.assertEqual("secret_key", check_credential.secret_access_key()) self.assertEqual(0, check_credential.expire_time_in_ms()) def test_gcs_token_credential(self): - credential_info = {GCSTokenCredential._GCS_TOKEN_NAME: "token"} - credential = GCSTokenCredential(credential_info, 1000) - credential_info = credential.credential_info() - expire_time = credential.expire_time_in_ms() + gcs_credential_info = 
{GCSTokenCredential._GCS_TOKEN_NAME: "token"} + gcs_credential = GCSTokenCredential(gcs_credential_info, 1000) + credential_info = gcs_credential.credential_info() + expire_time = gcs_credential.expire_time_in_ms() check_credential = CredentialFactory.create( - credential.credential_type(), credential_info, expire_time + gcs_credential.credential_type(), credential_info, expire_time ) self.assertEqual( GCSTokenCredential.GCS_TOKEN_CREDENTIAL_TYPE, check_credential.credential_type(), ) + + self.assertIsInstance(check_credential, GCSTokenCredential) self.assertEqual("token", check_credential.token()) self.assertEqual(1000, check_credential.expire_time_in_ms()) def test_oss_token_credential(self): - credential_info = { - OSSTokenCredential._GRAVITINO_OSS_TOKEN: "token", - OSSTokenCredential._GRAVITINO_OSS_SESSION_ACCESS_KEY_ID: "access_id", - OSSTokenCredential._GRAVITINO_OSS_SESSION_SECRET_ACCESS_KEY: "secret_key", + oss_credential_info = { + OSSTokenCredential._STATIC_ACCESS_KEY_ID: "access_id", + OSSTokenCredential._STATIC_SECRET_ACCESS_KEY: "secret_key", + OSSTokenCredential._OSS_TOKEN: "token", } - credential = OSSTokenCredential(credential_info, 1000) - credential_info = credential.credential_info() - expire_time = credential.expire_time_in_ms() + oss_credential = OSSTokenCredential(oss_credential_info, 1000) + credential_info = oss_credential.credential_info() + expire_time = oss_credential.expire_time_in_ms() check_credential = CredentialFactory.create( - credential.credential_type(), credential_info, expire_time + oss_credential.credential_type(), credential_info, expire_time ) self.assertEqual( OSSTokenCredential.OSS_TOKEN_CREDENTIAL_TYPE, check_credential.credential_type(), ) + + self.assertIsInstance(check_credential, OSSTokenCredential) self.assertEqual("token", check_credential.security_token()) self.assertEqual("access_id", check_credential.access_key_id()) self.assertEqual("secret_key", check_credential.secret_access_key()) @@ -103,8 +123,8 @@ def 
test_oss_token_credential(self): def test_oss_secret_key_credential(self): oss_credential_info = { - OSSSecretKeyCredential._GRAVITINO_OSS_STATIC_ACCESS_KEY_ID: "access_key", - OSSSecretKeyCredential._GRAVITINO_OSS_STATIC_SECRET_ACCESS_KEY: "secret_key", + OSSSecretKeyCredential._STATIC_ACCESS_KEY_ID: "access_key", + OSSSecretKeyCredential._STATIC_SECRET_ACCESS_KEY: "secret_key", } oss_credential = OSSSecretKeyCredential(oss_credential_info, 0) credential_info = oss_credential.credential_info() @@ -113,6 +133,56 @@ def test_oss_secret_key_credential(self): check_credential = CredentialFactory.create( oss_credential.OSS_SECRET_KEY_CREDENTIAL_TYPE, credential_info, expire_time ) + self.assertEqual( + OSSSecretKeyCredential.OSS_SECRET_KEY_CREDENTIAL_TYPE, + check_credential.credential_type(), + ) + + self.assertIsInstance(check_credential, OSSSecretKeyCredential) self.assertEqual("access_key", check_credential.access_key_id()) self.assertEqual("secret_key", check_credential.secret_access_key()) self.assertEqual(0, check_credential.expire_time_in_ms()) + + def test_adls_token_credential(self): + adls_credential_info = { + ADLSTokenCredential._STORAGE_ACCOUNT_NAME: "account_name", + ADLSTokenCredential._SAS_TOKEN: "sas_token", + } + adls_credential = ADLSTokenCredential(adls_credential_info, 1000) + credential_info = adls_credential.credential_info() + expire_time = adls_credential.expire_time_in_ms() + + check_credential = CredentialFactory.create( + adls_credential.credential_type(), credential_info, expire_time + ) + self.assertEqual( + ADLSTokenCredential.ADLS_SAS_TOKEN_CREDENTIAL_TYPE, + check_credential.credential_type(), + ) + + self.assertIsInstance(check_credential, ADLSTokenCredential) + self.assertEqual("account_name", check_credential.account_name()) + self.assertEqual("sas_token", check_credential.sas_token()) + self.assertEqual(1000, check_credential.expire_time_in_ms()) + + def test_azure_account_key_credential(self): + azure_credential_info = { + 
AzureAccountKeyCredential._STORAGE_ACCOUNT_NAME: "account_name", + AzureAccountKeyCredential._STORAGE_ACCOUNT_KEY: "account_key", + } + azure_credential = AzureAccountKeyCredential(azure_credential_info, 0) + credential_info = azure_credential.credential_info() + expire_time = azure_credential.expire_time_in_ms() + + check_credential = CredentialFactory.create( + azure_credential.credential_type(), credential_info, expire_time + ) + self.assertEqual( + AzureAccountKeyCredential.AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE, + check_credential.credential_type(), + ) + + self.assertIsInstance(check_credential, AzureAccountKeyCredential) + self.assertEqual("account_name", check_credential.account_name()) + self.assertEqual("account_key", check_credential.account_key()) + self.assertEqual(0, check_credential.expire_time_in_ms()) diff --git a/common/src/main/java/org/apache/gravitino/credential/CredentialPropertyUtils.java b/common/src/main/java/org/apache/gravitino/credential/CredentialPropertyUtils.java index e1803a6ddf1..d7a3caf067f 100644 --- a/common/src/main/java/org/apache/gravitino/credential/CredentialPropertyUtils.java +++ b/common/src/main/java/org/apache/gravitino/credential/CredentialPropertyUtils.java @@ -33,12 +33,19 @@ public class CredentialPropertyUtils { @VisibleForTesting static final String ICEBERG_S3_SECRET_ACCESS_KEY = "s3.secret-access-key"; @VisibleForTesting static final String ICEBERG_S3_TOKEN = "s3.session-token"; @VisibleForTesting static final String ICEBERG_GCS_TOKEN = "gcs.oauth2.token"; - @VisibleForTesting static final String ICEBERG_ADLS_TOKEN = "adls.sas-token"; @VisibleForTesting static final String ICEBERG_OSS_ACCESS_KEY_ID = "client.access-key-id"; @VisibleForTesting static final String ICEBERG_OSS_ACCESS_KEY_SECRET = "client.access-key-secret"; @VisibleForTesting static final String ICEBERG_OSS_SECURITY_TOKEN = "client.security-token"; + @VisibleForTesting static final String ICEBERG_ADLS_TOKEN = "adls.sas-token"; + + @VisibleForTesting + 
static final String ICEBERG_ADLS_ACCOUNT_NAME = "adls.auth.shared-key.account.name"; + + @VisibleForTesting + static final String ICEBERG_ADLS_ACCOUNT_KEY = "adls.auth.shared-key.account.key"; + private static Map icebergCredentialPropertyMap = ImmutableMap.of( GCSTokenCredential.GCS_TOKEN_NAME, @@ -54,7 +61,11 @@ public class CredentialPropertyUtils { OSSTokenCredential.GRAVITINO_OSS_SESSION_ACCESS_KEY_ID, ICEBERG_OSS_ACCESS_KEY_ID, OSSTokenCredential.GRAVITINO_OSS_SESSION_SECRET_ACCESS_KEY, - ICEBERG_OSS_ACCESS_KEY_SECRET); + ICEBERG_OSS_ACCESS_KEY_SECRET, + AzureAccountKeyCredential.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, + ICEBERG_ADLS_ACCOUNT_NAME, + AzureAccountKeyCredential.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, + ICEBERG_ADLS_ACCOUNT_KEY); /** * Transforms a specific credential into a map of Iceberg properties. @@ -63,6 +74,14 @@ public class CredentialPropertyUtils { * @return a map of Iceberg properties derived from the credential */ public static Map toIcebergProperties(Credential credential) { + if (credential instanceof S3TokenCredential + || credential instanceof S3SecretKeyCredential + || credential instanceof OSSTokenCredential + || credential instanceof OSSSecretKeyCredential + || credential instanceof AzureAccountKeyCredential) { + return transformProperties(credential.credentialInfo(), icebergCredentialPropertyMap); + } + if (credential instanceof GCSTokenCredential) { Map icebergGCSCredentialProperties = transformProperties(credential.credentialInfo(), icebergCredentialPropertyMap); @@ -70,12 +89,7 @@ public static Map toIcebergProperties(Credential credential) { "gcs.oauth2.token-expires-at", String.valueOf(credential.expireTimeInMs())); return icebergGCSCredentialProperties; } - if (credential instanceof S3TokenCredential || credential instanceof S3SecretKeyCredential) { - return transformProperties(credential.credentialInfo(), icebergCredentialPropertyMap); - } - if (credential instanceof OSSTokenCredential || credential instanceof 
OSSSecretKeyCredential) { - return transformProperties(credential.credentialInfo(), icebergCredentialPropertyMap); - } + if (credential instanceof ADLSTokenCredential) { ADLSTokenCredential adlsCredential = (ADLSTokenCredential) credential; String sasTokenKey = @@ -87,6 +101,7 @@ public static Map toIcebergProperties(Credential credential) { icebergADLSCredentialProperties.put(sasTokenKey, adlsCredential.sasToken()); return icebergADLSCredentialProperties; } + return credential.toProperties(); } diff --git a/common/src/test/java/org/apache/gravitino/credential/TestCredentialFactory.java b/common/src/test/java/org/apache/gravitino/credential/TestCredentialFactory.java index 75a669e3887..6291b8857d7 100644 --- a/common/src/test/java/org/apache/gravitino/credential/TestCredentialFactory.java +++ b/common/src/test/java/org/apache/gravitino/credential/TestCredentialFactory.java @@ -165,4 +165,31 @@ void testADLSTokenCredential() { Assertions.assertEquals(sasToken, adlsTokenCredential.sasToken()); Assertions.assertEquals(expireTime, adlsTokenCredential.expireTimeInMs()); } + + @Test + void testAzureAccountKeyCredential() { + String storageAccountName = "storage-account-name"; + String storageAccountKey = "storage-account-key"; + + Map azureAccountKeyCredentialInfo = + ImmutableMap.of( + AzureAccountKeyCredential.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, + storageAccountName, + AzureAccountKeyCredential.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, + storageAccountKey); + long expireTime = 0; + Credential credential = + CredentialFactory.create( + AzureAccountKeyCredential.AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE, + azureAccountKeyCredentialInfo, + expireTime); + Assertions.assertEquals( + AzureAccountKeyCredential.AZURE_ACCOUNT_KEY_CREDENTIAL_TYPE, credential.credentialType()); + Assertions.assertInstanceOf(AzureAccountKeyCredential.class, credential); + + AzureAccountKeyCredential azureAccountKeyCredential = (AzureAccountKeyCredential) credential; + 
Assertions.assertEquals(storageAccountName, azureAccountKeyCredential.accountName()); + Assertions.assertEquals(storageAccountKey, azureAccountKeyCredential.accountKey()); + Assertions.assertEquals(expireTime, azureAccountKeyCredential.expireTimeInMs()); + } } diff --git a/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java b/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java index 793b478eb6d..0e236b72635 100644 --- a/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java +++ b/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java @@ -18,10 +18,12 @@ */ package org.apache.gravitino.authorization; +import com.google.common.collect.Lists; import com.google.common.collect.Sets; import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.function.BiConsumer; import java.util.function.Consumer; import org.apache.gravitino.Catalog; import org.apache.gravitino.Entity; @@ -39,6 +41,7 @@ import org.apache.gravitino.exceptions.NoSuchCatalogException; import org.apache.gravitino.exceptions.NoSuchMetadataObjectException; import org.apache.gravitino.exceptions.NoSuchUserException; +import org.apache.gravitino.meta.RoleEntity; import org.apache.gravitino.utils.MetadataObjectUtil; import org.apache.gravitino.utils.NameIdentifierUtil; @@ -144,8 +147,8 @@ public static void checkRoleNamespace(Namespace namespace) { public static void callAuthorizationPluginForSecurableObjects( String metalake, List securableObjects, - Set catalogsAlreadySet, - Consumer consumer) { + BiConsumer consumer) { + Set catalogsAlreadySet = Sets.newHashSet(); CatalogManager catalogManager = GravitinoEnv.getInstance().catalogManager(); for (SecurableObject securableObject : securableObjects) { if (needApplyAuthorizationPluginAllCatalogs(securableObject)) { @@ -245,40 +248,6 @@ public static void checkPrivilege( } } - private static void checkCatalogType( - NameIdentifier 
catalogIdent, Catalog.Type type, Privilege privilege) { - Catalog catalog = GravitinoEnv.getInstance().catalogDispatcher().loadCatalog(catalogIdent); - if (catalog.type() != type) { - throw new IllegalPrivilegeException( - "Catalog %s type %s doesn't support privilege %s", - catalogIdent, catalog.type(), privilege); - } - } - - private static boolean needApplyAuthorizationPluginAllCatalogs(MetadataObject.Type type) { - return type == MetadataObject.Type.METALAKE; - } - - private static boolean needApplyAuthorization(MetadataObject.Type type) { - return type != MetadataObject.Type.ROLE && type != MetadataObject.Type.METALAKE; - } - - private static void callAuthorizationPluginImpl( - Consumer consumer, Catalog catalog) { - - if (catalog instanceof BaseCatalog) { - BaseCatalog baseCatalog = (BaseCatalog) catalog; - if (baseCatalog.getAuthorizationPlugin() != null) { - consumer.accept(baseCatalog.getAuthorizationPlugin()); - } - } else { - throw new IllegalArgumentException( - String.format( - "Catalog %s is not a BaseCatalog, we don't support authorization plugin for it", - catalog.type())); - } - } - public static void authorizationPluginRemovePrivileges( NameIdentifier ident, Entity.EntityType type) { // If we enable authorization, we should remove the privileges about the entity in the @@ -305,12 +274,94 @@ public static void authorizationPluginRenamePrivileges( NameIdentifierUtil.toMetadataObject(NameIdentifier.of(ident.namespace(), newName), type); MetadataObjectChange renameObject = MetadataObjectChange.rename(oldMetadataObject, newMetadataObject); + + String metalake = type == Entity.EntityType.METALAKE ? 
newName : ident.namespace().level(0); + + // For a renamed catalog, we should pass the new name catalog, otherwise we can't find the + // catalog in the entity store callAuthorizationPluginForMetadataObject( - ident.namespace().level(0), - oldMetadataObject, + metalake, + newMetadataObject, authorizationPlugin -> { authorizationPlugin.onMetadataUpdated(renameObject); }); } } + + public static Role filterSecurableObjects( + RoleEntity role, String metalakeName, String catalogName) { + List securableObjects = role.securableObjects(); + List filteredSecurableObjects = Lists.newArrayList(); + for (SecurableObject securableObject : securableObjects) { + NameIdentifier identifier = MetadataObjectUtil.toEntityIdent(metalakeName, securableObject); + if (securableObject.type() == MetadataObject.Type.METALAKE) { + filteredSecurableObjects.add(securableObject); + } else { + NameIdentifier catalogIdent = NameIdentifierUtil.getCatalogIdentifier(identifier); + + if (catalogIdent.name().equals(catalogName)) { + filteredSecurableObjects.add(securableObject); + } + } + } + + return RoleEntity.builder() + .withId(role.id()) + .withName(role.name()) + .withAuditInfo(role.auditInfo()) + .withNamespace(role.namespace()) + .withSecurableObjects(filteredSecurableObjects) + .withProperties(role.properties()) + .build(); + } + + private static boolean needApplyAuthorizationPluginAllCatalogs(MetadataObject.Type type) { + return type == MetadataObject.Type.METALAKE; + } + + private static boolean needApplyAuthorization(MetadataObject.Type type) { + return type != MetadataObject.Type.ROLE && type != MetadataObject.Type.METALAKE; + } + + private static void callAuthorizationPluginImpl( + BiConsumer consumer, Catalog catalog) { + + if (catalog instanceof BaseCatalog) { + BaseCatalog baseCatalog = (BaseCatalog) catalog; + if (baseCatalog.getAuthorizationPlugin() != null) { + consumer.accept(baseCatalog.getAuthorizationPlugin(), catalog.name()); + } + } else { + throw new 
IllegalArgumentException( + String.format( + "Catalog %s is not a BaseCatalog, we don't support authorization plugin for it", + catalog.type())); + } + } + + private static void callAuthorizationPluginImpl( + Consumer consumer, Catalog catalog) { + + if (catalog instanceof BaseCatalog) { + BaseCatalog baseCatalog = (BaseCatalog) catalog; + if (baseCatalog.getAuthorizationPlugin() != null) { + consumer.accept(baseCatalog.getAuthorizationPlugin()); + } + } else { + throw new IllegalArgumentException( + String.format( + "Catalog %s is not a BaseCatalog, we don't support authorization plugin for it", + catalog.type())); + } + } + + private static void checkCatalogType( + NameIdentifier catalogIdent, Catalog.Type type, Privilege privilege) { + Catalog catalog = GravitinoEnv.getInstance().catalogDispatcher().loadCatalog(catalogIdent); + if (catalog.type() != type) { + throw new IllegalPrivilegeException( + "Catalog %s type %s doesn't support privilege %s", + catalogIdent, catalog.type(), privilege); + } + } } diff --git a/core/src/main/java/org/apache/gravitino/authorization/PermissionManager.java b/core/src/main/java/org/apache/gravitino/authorization/PermissionManager.java index 02c240f30a9..bdaa8f6f74d 100644 --- a/core/src/main/java/org/apache/gravitino/authorization/PermissionManager.java +++ b/core/src/main/java/org/apache/gravitino/authorization/PermissionManager.java @@ -21,6 +21,7 @@ import static org.apache.gravitino.authorization.AuthorizationUtils.GROUP_DOES_NOT_EXIST_MSG; import static org.apache.gravitino.authorization.AuthorizationUtils.ROLE_DOES_NOT_EXIST_MSG; import static org.apache.gravitino.authorization.AuthorizationUtils.USER_DOES_NOT_EXIST_MSG; +import static org.apache.gravitino.authorization.AuthorizationUtils.filterSecurableObjects; import com.google.common.collect.Lists; import java.io.IOException; @@ -115,17 +116,22 @@ User grantRolesToUser(String metalake, List roles, String user) { .build(); }); - Set catalogs = Sets.newHashSet(); + List 
securableObjects = Lists.newArrayList(); + for (Role grantedRole : roleEntitiesToGrant) { - AuthorizationUtils.callAuthorizationPluginForSecurableObjects( - metalake, - grantedRole.securableObjects(), - catalogs, - authorizationPlugin -> - authorizationPlugin.onGrantedRolesToUser( - Lists.newArrayList(roleEntitiesToGrant), updatedUser)); + securableObjects.addAll(grantedRole.securableObjects()); } + AuthorizationUtils.callAuthorizationPluginForSecurableObjects( + metalake, + securableObjects, + (authorizationPlugin, catalogName) -> + authorizationPlugin.onGrantedRolesToUser( + roleEntitiesToGrant.stream() + .map(roleEntity -> filterSecurableObjects(roleEntity, metalake, catalogName)) + .collect(Collectors.toList()), + updatedUser)); + return updatedUser; } catch (NoSuchEntityException nse) { LOG.warn("Failed to grant, user {} does not exist in the metalake {}", user, metalake, nse); @@ -196,17 +202,22 @@ Group grantRolesToGroup(String metalake, List roles, String group) { .build(); }); - Set catalogs = Sets.newHashSet(); + List securableObjects = Lists.newArrayList(); + for (Role grantedRole : roleEntitiesToGrant) { - AuthorizationUtils.callAuthorizationPluginForSecurableObjects( - metalake, - grantedRole.securableObjects(), - catalogs, - authorizationPlugin -> - authorizationPlugin.onGrantedRolesToGroup( - Lists.newArrayList(roleEntitiesToGrant), updatedGroup)); + securableObjects.addAll(grantedRole.securableObjects()); } + AuthorizationUtils.callAuthorizationPluginForSecurableObjects( + metalake, + securableObjects, + (authorizationPlugin, catalogName) -> + authorizationPlugin.onGrantedRolesToGroup( + roleEntitiesToGrant.stream() + .map(roleEntity -> filterSecurableObjects(roleEntity, metalake, catalogName)) + .collect(Collectors.toList()), + updatedGroup)); + return updatedGroup; } catch (NoSuchEntityException nse) { LOG.warn("Failed to grant, group {} does not exist in the metalake {}", group, metalake, nse); @@ -276,17 +287,21 @@ Group 
revokeRolesFromGroup(String metalake, List roles, String group) { .build(); }); - Set catalogs = Sets.newHashSet(); + List securableObjects = Lists.newArrayList(); for (Role grantedRole : roleEntitiesToRevoke) { - AuthorizationUtils.callAuthorizationPluginForSecurableObjects( - metalake, - grantedRole.securableObjects(), - catalogs, - authorizationPlugin -> - authorizationPlugin.onRevokedRolesFromGroup( - Lists.newArrayList(roleEntitiesToRevoke), updatedGroup)); + securableObjects.addAll(grantedRole.securableObjects()); } + AuthorizationUtils.callAuthorizationPluginForSecurableObjects( + metalake, + securableObjects, + (authorizationPlugin, catalogName) -> + authorizationPlugin.onRevokedRolesFromGroup( + roleEntitiesToRevoke.stream() + .map(roleEntity -> filterSecurableObjects(roleEntity, metalake, catalogName)) + .collect(Collectors.toList()), + updatedGroup)); + return updatedGroup; } catch (NoSuchEntityException nse) { @@ -358,17 +373,21 @@ User revokeRolesFromUser(String metalake, List roles, String user) { .build(); }); - Set catalogs = Sets.newHashSet(); + List securableObjects = Lists.newArrayList(); for (Role grantedRole : roleEntitiesToRevoke) { - AuthorizationUtils.callAuthorizationPluginForSecurableObjects( - metalake, - grantedRole.securableObjects(), - catalogs, - authorizationPlugin -> - authorizationPlugin.onRevokedRolesFromUser( - Lists.newArrayList(roleEntitiesToRevoke), updatedUser)); + securableObjects.addAll(grantedRole.securableObjects()); } + AuthorizationUtils.callAuthorizationPluginForSecurableObjects( + metalake, + securableObjects, + (authorizationPlugin, catalogName) -> + authorizationPlugin.onRevokedRolesFromUser( + roleEntitiesToRevoke.stream() + .map(roleEntity -> filterSecurableObjects(roleEntity, metalake, catalogName)) + .collect(Collectors.toList()), + updatedUser)); + return updatedUser; } catch (NoSuchEntityException nse) { LOG.warn("Failed to revoke, user {} does not exist in the metalake {}", user, metalake, nse); diff --git 
a/core/src/main/java/org/apache/gravitino/authorization/RoleManager.java b/core/src/main/java/org/apache/gravitino/authorization/RoleManager.java index 11c24102bca..16e1cdda379 100644 --- a/core/src/main/java/org/apache/gravitino/authorization/RoleManager.java +++ b/core/src/main/java/org/apache/gravitino/authorization/RoleManager.java @@ -21,7 +21,6 @@ import static org.apache.gravitino.metalake.MetalakeManager.checkMetalake; -import com.google.common.collect.Sets; import java.io.IOException; import java.time.Instant; import java.util.List; @@ -87,8 +86,9 @@ RoleEntity createRole( AuthorizationUtils.callAuthorizationPluginForSecurableObjects( metalake, roleEntity.securableObjects(), - Sets.newHashSet(), - authorizationPlugin -> authorizationPlugin.onRoleCreated(roleEntity)); + (authorizationPlugin, catalogName) -> + authorizationPlugin.onRoleCreated( + AuthorizationUtils.filterSecurableObjects(roleEntity, metalake, catalogName))); return roleEntity; } catch (EntityAlreadyExistsException e) { @@ -122,8 +122,9 @@ boolean deleteRole(String metalake, String role) { AuthorizationUtils.callAuthorizationPluginForSecurableObjects( metalake, roleEntity.securableObjects(), - Sets.newHashSet(), - authorizationPlugin -> authorizationPlugin.onRoleDeleted(roleEntity)); + (authorizationPlugin, catalogName) -> + authorizationPlugin.onRoleDeleted( + AuthorizationUtils.filterSecurableObjects(roleEntity, metalake, catalogName))); } catch (NoSuchEntityException nse) { // ignore, because the role may have been deleted. 
} diff --git a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java index 2e77b8e162a..43bc74bb2a1 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java +++ b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java @@ -95,6 +95,7 @@ import org.apache.gravitino.meta.AuditInfo; import org.apache.gravitino.meta.CatalogEntity; import org.apache.gravitino.meta.SchemaEntity; +import org.apache.gravitino.model.ModelCatalog; import org.apache.gravitino.rel.SupportsPartitions; import org.apache.gravitino.rel.Table; import org.apache.gravitino.rel.TableCatalog; @@ -178,6 +179,16 @@ public R doWithTopicOps(ThrowableFunction fn) throws Except }); } + public R doWithModelOps(ThrowableFunction fn) throws Exception { + return classLoader.withClassLoader( + cl -> { + if (asModels() == null) { + throw new UnsupportedOperationException("Catalog does not support model operations"); + } + return fn.apply(asModels()); + }); + } + public R doWithCatalogOps(ThrowableFunction fn) throws Exception { return classLoader.withClassLoader(cl -> fn.apply(catalog.ops())); } @@ -236,6 +247,10 @@ private FilesetCatalog asFilesets() { private TopicCatalog asTopics() { return catalog.ops() instanceof TopicCatalog ? (TopicCatalog) catalog.ops() : null; } + + private ModelCatalog asModels() { + return catalog.ops() instanceof ModelCatalog ? 
(ModelCatalog) catalog.ops() : null; + } } private final Config config; diff --git a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedFileset.java b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedFileset.java index 2a6b55a2ddd..c7b847fc9c6 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedFileset.java +++ b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedFileset.java @@ -48,7 +48,7 @@ public static EntityCombinedFileset of(Fileset fileset) { return new EntityCombinedFileset(fileset, null); } - public EntityCombinedFileset withHiddenPropertiesSet(Set hiddenProperties) { + public EntityCombinedFileset withHiddenProperties(Set hiddenProperties) { this.hiddenProperties = hiddenProperties; return this; } diff --git a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedModel.java b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedModel.java new file mode 100644 index 00000000000..4aeefa0be59 --- /dev/null +++ b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedModel.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.catalog; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.gravitino.Audit; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.ModelEntity; +import org.apache.gravitino.model.Model; + +public final class EntityCombinedModel implements Model { + + private final Model model; + + private final ModelEntity modelEntity; + + private Set hiddenProperties = Collections.emptySet(); + + private EntityCombinedModel(Model model, ModelEntity modelEntity) { + this.model = model; + this.modelEntity = modelEntity; + } + + public static EntityCombinedModel of(Model model, ModelEntity modelEntity) { + return new EntityCombinedModel(model, modelEntity); + } + + public static EntityCombinedModel of(Model model) { + return new EntityCombinedModel(model, null); + } + + public EntityCombinedModel withHiddenProperties(Set hiddenProperties) { + this.hiddenProperties = hiddenProperties; + return this; + } + + @Override + public String name() { + return model.name(); + } + + @Override + public String comment() { + return model.comment(); + } + + @Override + public Map properties() { + return model.properties() == null + ? null + : model.properties().entrySet().stream() + .filter(e -> !hiddenProperties.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public int latestVersion() { + return model.latestVersion(); + } + + @Override + public Audit auditInfo() { + AuditInfo mergedAudit = + AuditInfo.builder() + .withCreator(model.auditInfo().creator()) + .withCreateTime(model.auditInfo().createTime()) + .withLastModifier(model.auditInfo().lastModifier()) + .withLastModifiedTime(model.auditInfo().lastModifiedTime()) + .build(); + + return modelEntity == null + ? 
mergedAudit + : mergedAudit.merge(modelEntity.auditInfo(), true /* overwrite */); + } +} diff --git a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedModelVersion.java b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedModelVersion.java new file mode 100644 index 00000000000..b41e2889de3 --- /dev/null +++ b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedModelVersion.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.catalog; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.gravitino.Audit; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.ModelVersionEntity; +import org.apache.gravitino.model.ModelVersion; + +public final class EntityCombinedModelVersion implements ModelVersion { + + private final ModelVersion modelVersion; + + private final ModelVersionEntity modelVersionEntity; + + private Set hiddenProperties = Collections.emptySet(); + + private EntityCombinedModelVersion( + ModelVersion modelVersion, ModelVersionEntity modelVersionEntity) { + this.modelVersion = modelVersion; + this.modelVersionEntity = modelVersionEntity; + } + + public static EntityCombinedModelVersion of( + ModelVersion modelVersion, ModelVersionEntity modelVersionEntity) { + return new EntityCombinedModelVersion(modelVersion, modelVersionEntity); + } + + public static EntityCombinedModelVersion of(ModelVersion modelVersion) { + return new EntityCombinedModelVersion(modelVersion, null); + } + + public EntityCombinedModelVersion withHiddenProperties(Set hiddenProperties) { + this.hiddenProperties = hiddenProperties; + return this; + } + + @Override + public int version() { + return modelVersion.version(); + } + + @Override + public String comment() { + return modelVersion.comment(); + } + + @Override + public Map properties() { + return modelVersion.properties() == null + ? 
null + : modelVersion.properties().entrySet().stream() + .filter(e -> !hiddenProperties.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public String uri() { + return modelVersion.uri(); + } + + @Override + public String[] aliases() { + return modelVersion.aliases(); + } + + @Override + public Audit auditInfo() { + AuditInfo mergedAudit = + AuditInfo.builder() + .withCreator(modelVersion.auditInfo().creator()) + .withCreateTime(modelVersion.auditInfo().createTime()) + .withLastModifier(modelVersion.auditInfo().lastModifier()) + .withLastModifiedTime(modelVersion.auditInfo().lastModifiedTime()) + .build(); + + return modelVersionEntity == null + ? mergedAudit + : mergedAudit.merge(modelVersionEntity.auditInfo(), true /* overwrite */); + } +} diff --git a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedSchema.java b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedSchema.java index 79a4b12a10c..ce3d0a3be72 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedSchema.java +++ b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedSchema.java @@ -61,7 +61,7 @@ public static EntityCombinedSchema of(Schema schema) { return of(schema, null); } - public EntityCombinedSchema withHiddenPropertiesSet(Set hiddenProperties) { + public EntityCombinedSchema withHiddenProperties(Set hiddenProperties) { this.hiddenProperties = hiddenProperties; return this; } diff --git a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTable.java b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTable.java index 4b0da1568b9..70cbd0ace4a 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTable.java +++ b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTable.java @@ -67,7 +67,7 @@ public static EntityCombinedTable of(Table table) { return new EntityCombinedTable(table, null); } - public EntityCombinedTable 
withHiddenPropertiesSet(Set hiddenProperties) { + public EntityCombinedTable withHiddenProperties(Set hiddenProperties) { this.hiddenProperties = hiddenProperties; return this; } diff --git a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTopic.java b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTopic.java index 2360f31ae74..972df622b3d 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTopic.java +++ b/core/src/main/java/org/apache/gravitino/catalog/EntityCombinedTopic.java @@ -60,7 +60,7 @@ public static EntityCombinedTopic of(Topic topic) { return new EntityCombinedTopic(topic, null); } - public EntityCombinedTopic withHiddenPropertiesSet(Set hiddenProperties) { + public EntityCombinedTopic withHiddenProperties(Set hiddenProperties) { this.hiddenProperties = hiddenProperties; return this; } diff --git a/core/src/main/java/org/apache/gravitino/catalog/FilesetOperationDispatcher.java b/core/src/main/java/org/apache/gravitino/catalog/FilesetOperationDispatcher.java index 98c6311bd7c..828e981380a 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/FilesetOperationDispatcher.java +++ b/core/src/main/java/org/apache/gravitino/catalog/FilesetOperationDispatcher.java @@ -81,7 +81,7 @@ public Fileset loadFileset(NameIdentifier ident) throws NoSuchFilesetException { // Currently we only support maintaining the Fileset in the Gravitino's store. return EntityCombinedFileset.of(fileset) - .withHiddenPropertiesSet( + .withHiddenProperties( getHiddenPropertyNames( catalogIdent, HasPropertyMetadata::filesetPropertiesMetadata, @@ -137,7 +137,7 @@ public Fileset createFileset( NoSuchSchemaException.class, FilesetAlreadyExistsException.class); return EntityCombinedFileset.of(createdFileset) - .withHiddenPropertiesSet( + .withHiddenProperties( getHiddenPropertyNames( catalogIdent, HasPropertyMetadata::filesetPropertiesMetadata, @@ -172,7 +172,7 @@ public Fileset alterFileset(NameIdentifier ident, FilesetChange... 
changes) NoSuchFilesetException.class, IllegalArgumentException.class); return EntityCombinedFileset.of(alteredFileset) - .withHiddenPropertiesSet( + .withHiddenProperties( getHiddenPropertyNames( catalogIdent, HasPropertyMetadata::filesetPropertiesMetadata, diff --git a/core/src/main/java/org/apache/gravitino/catalog/ModelOperationDispatcher.java b/core/src/main/java/org/apache/gravitino/catalog/ModelOperationDispatcher.java index eb1f17c96da..1c5291d51a2 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/ModelOperationDispatcher.java +++ b/core/src/main/java/org/apache/gravitino/catalog/ModelOperationDispatcher.java @@ -18,15 +18,23 @@ */ package org.apache.gravitino.catalog; +import static org.apache.gravitino.catalog.PropertiesMetadataHelpers.validatePropertyForCreate; +import static org.apache.gravitino.utils.NameIdentifierUtil.getCatalogIdentifier; + import java.util.Map; +import java.util.function.Supplier; import org.apache.gravitino.EntityStore; import org.apache.gravitino.NameIdentifier; import org.apache.gravitino.Namespace; +import org.apache.gravitino.StringIdentifier; +import org.apache.gravitino.connector.HasPropertyMetadata; import org.apache.gravitino.exceptions.ModelAlreadyExistsException; import org.apache.gravitino.exceptions.ModelVersionAliasesAlreadyExistException; import org.apache.gravitino.exceptions.NoSuchModelException; import org.apache.gravitino.exceptions.NoSuchModelVersionException; import org.apache.gravitino.exceptions.NoSuchSchemaException; +import org.apache.gravitino.lock.LockType; +import org.apache.gravitino.lock.TreeLockUtils; import org.apache.gravitino.model.Model; import org.apache.gravitino.model.ModelVersion; import org.apache.gravitino.storage.IdGenerator; @@ -40,40 +48,114 @@ public ModelOperationDispatcher( @Override public NameIdentifier[] listModels(Namespace namespace) throws NoSuchSchemaException { - throw new UnsupportedOperationException("Not implemented"); + return TreeLockUtils.doWithTreeLock( + 
NameIdentifier.of(namespace.levels()), + LockType.READ, + () -> + doWithCatalog( + getCatalogIdentifier(NameIdentifier.of(namespace.levels())), + c -> c.doWithModelOps(m -> m.listModels(namespace)), + NoSuchSchemaException.class)); } @Override public Model getModel(NameIdentifier ident) throws NoSuchModelException { - throw new UnsupportedOperationException("Not implemented"); + NameIdentifier catalogIdent = getCatalogIdentifier(ident); + Model model = + TreeLockUtils.doWithTreeLock( + ident, + LockType.READ, + () -> + doWithCatalog( + catalogIdent, + c -> c.doWithModelOps(m -> m.getModel(ident)), + NoSuchModelException.class)); + + return EntityCombinedModel.of(model) + .withHiddenProperties( + getHiddenPropertyNames( + catalogIdent, HasPropertyMetadata::modelPropertiesMetadata, model.properties())); } @Override public Model registerModel(NameIdentifier ident, String comment, Map properties) throws NoSuchModelException, ModelAlreadyExistsException { - throw new UnsupportedOperationException("Not implemented"); + NameIdentifier catalogIdent = getCatalogIdentifier(ident); + Map updatedProperties = checkAndUpdateProperties(catalogIdent, properties); + + Model registeredModel = + TreeLockUtils.doWithTreeLock( + NameIdentifier.of(ident.namespace().levels()), + LockType.WRITE, + () -> + doWithCatalog( + catalogIdent, + c -> c.doWithModelOps(m -> m.registerModel(ident, comment, updatedProperties)), + NoSuchSchemaException.class, + ModelAlreadyExistsException.class)); + + return EntityCombinedModel.of(registeredModel) + .withHiddenProperties( + getHiddenPropertyNames( + catalogIdent, + HasPropertyMetadata::modelPropertiesMetadata, + registeredModel.properties())); } @Override public boolean deleteModel(NameIdentifier ident) { - throw new UnsupportedOperationException("Not implemented"); + return TreeLockUtils.doWithTreeLock( + NameIdentifier.of(ident.namespace().levels()), + LockType.WRITE, + () -> + doWithCatalog( + getCatalogIdentifier(ident), + c -> c.doWithModelOps(m 
-> m.deleteModel(ident)), + RuntimeException.class)); } @Override public int[] listModelVersions(NameIdentifier ident) throws NoSuchModelException { - throw new UnsupportedOperationException("Not implemented"); + return TreeLockUtils.doWithTreeLock( + ident, + LockType.READ, + () -> + doWithCatalog( + getCatalogIdentifier(ident), + c -> c.doWithModelOps(m -> m.listModelVersions(ident)), + NoSuchModelException.class)); } @Override public ModelVersion getModelVersion(NameIdentifier ident, int version) throws NoSuchModelVersionException { - throw new UnsupportedOperationException("Not implemented"); + return internalGetModelVersion( + ident, + () -> + TreeLockUtils.doWithTreeLock( + ident, + LockType.READ, + () -> + doWithCatalog( + getCatalogIdentifier(ident), + c -> c.doWithModelOps(m -> m.getModelVersion(ident, version)), + NoSuchModelVersionException.class))); } @Override public ModelVersion getModelVersion(NameIdentifier ident, String alias) throws NoSuchModelVersionException { - throw new UnsupportedOperationException("Not implemented"); + return internalGetModelVersion( + ident, + () -> + TreeLockUtils.doWithTreeLock( + ident, + LockType.READ, + () -> + doWithCatalog( + getCatalogIdentifier(ident), + c -> c.doWithModelOps(m -> m.getModelVersion(ident, alias)), + NoSuchModelVersionException.class))); } @Override @@ -84,16 +166,80 @@ public void linkModelVersion( String comment, Map properties) throws NoSuchModelException, ModelVersionAliasesAlreadyExistException { - throw new UnsupportedOperationException("Not implemented"); + NameIdentifier catalogIdent = getCatalogIdentifier(ident); + Map updatedProperties = checkAndUpdateProperties(catalogIdent, properties); + + TreeLockUtils.doWithTreeLock( + ident, + LockType.WRITE, + () -> + doWithCatalog( + catalogIdent, + c -> + c.doWithModelOps( + m -> { + m.linkModelVersion(ident, uri, aliases, comment, updatedProperties); + return null; + }), + NoSuchModelException.class, + 
ModelVersionAliasesAlreadyExistException.class)); } @Override public boolean deleteModelVersion(NameIdentifier ident, int version) { - throw new UnsupportedOperationException("Not implemented"); + return TreeLockUtils.doWithTreeLock( + ident, + LockType.WRITE, + () -> + doWithCatalog( + getCatalogIdentifier(ident), + c -> c.doWithModelOps(m -> m.deleteModelVersion(ident, version)), + RuntimeException.class)); } @Override public boolean deleteModelVersion(NameIdentifier ident, String alias) { - throw new UnsupportedOperationException("Not implemented"); + return TreeLockUtils.doWithTreeLock( + ident, + LockType.WRITE, + () -> + doWithCatalog( + getCatalogIdentifier(ident), + c -> c.doWithModelOps(m -> m.deleteModelVersion(ident, alias)), + RuntimeException.class)); + } + + private ModelVersion internalGetModelVersion( + NameIdentifier ident, Supplier supplier) { + NameIdentifier catalogIdent = getCatalogIdentifier(ident); + + ModelVersion modelVersion = supplier.get(); + return EntityCombinedModelVersion.of(modelVersion) + .withHiddenProperties( + getHiddenPropertyNames( + catalogIdent, + HasPropertyMetadata::modelPropertiesMetadata, + modelVersion.properties())); + } + + private Map checkAndUpdateProperties( + NameIdentifier catalogIdent, Map properties) { + TreeLockUtils.doWithTreeLock( + catalogIdent, + LockType.READ, + () -> + doWithCatalog( + catalogIdent, + c -> + c.doWithPropertiesMeta( + p -> { + validatePropertyForCreate(p.modelPropertiesMetadata(), properties); + return null; + }), + IllegalArgumentException.class)); + + long uid = idGenerator.nextId(); + StringIdentifier stringId = StringIdentifier.fromId(uid); + return StringIdentifier.newPropertiesWithId(stringId, properties); } } diff --git a/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java b/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java index ce870523a14..789e5e47155 100644 --- 
a/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java +++ b/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java @@ -125,7 +125,7 @@ public Schema createSchema(NameIdentifier ident, String comment, Map public static final String CATALOG_OPERATION_IMPL = "ops-impl"; // Underlying access control system plugin for this catalog. - private volatile BaseAuthorization authorization; + private volatile AuthorizationPlugin authorizationPlugin; private CatalogEntity entity; @@ -187,54 +187,64 @@ public CatalogOperations ops() { } public AuthorizationPlugin getAuthorizationPlugin() { - if (authorization == null) { - return null; + if (authorizationPlugin == null) { + synchronized (this) { + if (authorizationPlugin == null) { + return null; + } + } } - return authorization.plugin(entity.namespace().level(0), provider(), this.conf); + return authorizationPlugin; } public void initAuthorizationPluginInstance(IsolatedClassLoader classLoader) { - if (authorization != null) { - return; - } - - String authorizationProvider = - (String) catalogPropertiesMetadata().getOrDefault(conf, AUTHORIZATION_PROVIDER); - if (authorizationProvider == null) { - LOG.info("Authorization provider is not set!"); - return; - } - - try { - authorization = - classLoader.withClassLoader( - cl -> { - try { - ServiceLoader loader = - ServiceLoader.load(AuthorizationProvider.class, cl); - - List> providers = - Streams.stream(loader.iterator()) - .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider)) - .map(AuthorizationProvider::getClass) - .collect(Collectors.toList()); - if (providers.isEmpty()) { - throw new IllegalArgumentException( - "No authorization provider found for: " + authorizationProvider); - } else if (providers.size() > 1) { - throw new IllegalArgumentException( - "Multiple authorization providers found for: " + authorizationProvider); - } - return (BaseAuthorization) - 
Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance(); - } catch (Exception e) { - LOG.error("Failed to create authorization instance", e); - throw new RuntimeException(e); - } - }); - } catch (Exception e) { - LOG.error("Failed to load authorization with class loader", e); - throw new RuntimeException(e); + if (authorizationPlugin == null) { + synchronized (this) { + if (authorizationPlugin == null) { + String authorizationProvider = + (String) catalogPropertiesMetadata().getOrDefault(conf, AUTHORIZATION_PROVIDER); + if (authorizationProvider == null) { + LOG.info("Authorization provider is not set!"); + return; + } + try { + BaseAuthorization authorization = + classLoader.withClassLoader( + cl -> { + try { + ServiceLoader loader = + ServiceLoader.load(AuthorizationProvider.class, cl); + + List> providers = + Streams.stream(loader.iterator()) + .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider)) + .map(AuthorizationProvider::getClass) + .collect(Collectors.toList()); + if (providers.isEmpty()) { + throw new IllegalArgumentException( + "No authorization provider found for: " + authorizationProvider); + } else if (providers.size() > 1) { + throw new IllegalArgumentException( + "Multiple authorization providers found for: " + + authorizationProvider); + } + return (BaseAuthorization) + Iterables.getOnlyElement(providers) + .getDeclaredConstructor() + .newInstance(); + } catch (Exception e) { + LOG.error("Failed to create authorization instance", e); + throw new RuntimeException(e); + } + }); + authorizationPlugin = + authorization.newPlugin(entity.namespace().level(0), provider(), this.conf); + } catch (Exception e) { + LOG.error("Failed to load authorization with class loader", e); + throw new RuntimeException(e); + } + } + } } } @@ -244,9 +254,9 @@ public void close() throws IOException { ops.close(); ops = null; } - if (authorization != null) { - authorization.close(); - authorization = null; + if (authorizationPlugin != null) 
{ + authorizationPlugin.close(); + authorizationPlugin = null; } } diff --git a/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java b/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java index ce460e675e1..173ad3527a8 100644 --- a/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java +++ b/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java @@ -33,7 +33,6 @@ */ public abstract class BaseAuthorization implements AuthorizationProvider, Closeable { - private volatile AuthorizationPlugin plugin = null; /** * Creates a new instance of AuthorizationPlugin.
@@ -42,26 +41,9 @@ public abstract class BaseAuthorization * * @return A new instance of AuthorizationHook. */ - protected abstract AuthorizationPlugin newPlugin( + public abstract AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map config); - public AuthorizationPlugin plugin( - String metalake, String catalogProvider, Map config) { - if (plugin == null) { - synchronized (this) { - if (plugin == null) { - plugin = newPlugin(metalake, catalogProvider, config); - } - } - } - - return plugin; - } - @Override - public void close() throws IOException { - if (plugin != null) { - plugin.close(); - } - } + public void close() throws IOException {} } diff --git a/core/src/main/java/org/apache/gravitino/credential/config/ADLSCredentialConfig.java b/core/src/main/java/org/apache/gravitino/credential/config/AzureCredentialConfig.java similarity index 96% rename from core/src/main/java/org/apache/gravitino/credential/config/ADLSCredentialConfig.java rename to core/src/main/java/org/apache/gravitino/credential/config/AzureCredentialConfig.java index e9d368e6752..155cc6806e0 100644 --- a/core/src/main/java/org/apache/gravitino/credential/config/ADLSCredentialConfig.java +++ b/core/src/main/java/org/apache/gravitino/credential/config/AzureCredentialConfig.java @@ -29,7 +29,7 @@ import org.apache.gravitino.credential.CredentialConstants; import org.apache.gravitino.storage.AzureProperties; -public class ADLSCredentialConfig extends Config { +public class AzureCredentialConfig extends Config { public static final ConfigEntry AZURE_STORAGE_ACCOUNT_NAME = new ConfigBuilder(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME) @@ -79,7 +79,7 @@ public class ADLSCredentialConfig extends Config { .intConf() .createWithDefault(3600); - public ADLSCredentialConfig(Map properties) { + public AzureCredentialConfig(Map properties) { super(false); loadFromMap(properties, k -> true); } @@ -110,7 +110,7 @@ public String clientSecret() { } @NotNull - public Integer 
tokenExpireInSecs() { + public Integer adlsTokenExpireInSecs() { return this.get(ADLS_TOKEN_EXPIRE_IN_SECS); } } diff --git a/core/src/test/java/org/apache/gravitino/TestCatalog.java b/core/src/test/java/org/apache/gravitino/TestCatalog.java index bdb409f20fb..420396559d5 100644 --- a/core/src/test/java/org/apache/gravitino/TestCatalog.java +++ b/core/src/test/java/org/apache/gravitino/TestCatalog.java @@ -134,4 +134,9 @@ public PropertiesMetadata filesetPropertiesMetadata() throws UnsupportedOperatio public PropertiesMetadata topicPropertiesMetadata() throws UnsupportedOperationException { return BASE_PROPERTIES_METADATA; } + + @Override + public PropertiesMetadata modelPropertiesMetadata() throws UnsupportedOperationException { + return BASE_PROPERTIES_METADATA; + } } diff --git a/core/src/test/java/org/apache/gravitino/TestModel.java b/core/src/test/java/org/apache/gravitino/TestModel.java new file mode 100644 index 00000000000..ee632192f39 --- /dev/null +++ b/core/src/test/java/org/apache/gravitino/TestModel.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino; + +import org.apache.gravitino.connector.BaseModel; + +public class TestModel extends BaseModel { + + public static class Builder extends BaseModelBuilder { + + private Builder() {} + + @Override + protected TestModel internalBuild() { + TestModel model = new TestModel(); + model.name = name; + model.comment = comment; + model.properties = properties; + model.latestVersion = latestVersion; + model.auditInfo = auditInfo; + return model; + } + } + + public static Builder builder() { + return new Builder(); + } +} diff --git a/core/src/test/java/org/apache/gravitino/TestModelVersion.java b/core/src/test/java/org/apache/gravitino/TestModelVersion.java new file mode 100644 index 00000000000..487496c5fb0 --- /dev/null +++ b/core/src/test/java/org/apache/gravitino/TestModelVersion.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino; + +import org.apache.gravitino.connector.BaseModelVersion; + +public class TestModelVersion extends BaseModelVersion { + + public static class Builder extends BaseModelVersionBuilder { + + private Builder() {} + + @Override + protected TestModelVersion internalBuild() { + TestModelVersion modelVersion = new TestModelVersion(); + modelVersion.version = version; + modelVersion.comment = comment; + modelVersion.aliases = aliases; + modelVersion.uri = uri; + modelVersion.properties = properties; + modelVersion.auditInfo = auditInfo; + return modelVersion; + } + } + + public static Builder builder() { + return new Builder(); + } +} diff --git a/core/src/test/java/org/apache/gravitino/authorization/TestAuthorizationUtils.java b/core/src/test/java/org/apache/gravitino/authorization/TestAuthorizationUtils.java index c2d844fbd86..b602471c4d1 100644 --- a/core/src/test/java/org/apache/gravitino/authorization/TestAuthorizationUtils.java +++ b/core/src/test/java/org/apache/gravitino/authorization/TestAuthorizationUtils.java @@ -18,10 +18,14 @@ */ package org.apache.gravitino.authorization; +import com.google.common.collect.Lists; +import java.util.List; import org.apache.gravitino.NameIdentifier; import org.apache.gravitino.Namespace; import org.apache.gravitino.exceptions.IllegalNameIdentifierException; import org.apache.gravitino.exceptions.IllegalNamespaceException; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.RoleEntity; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -149,4 +153,61 @@ void testCheckNamespace() { IllegalNamespaceException.class, () -> AuthorizationUtils.checkRoleNamespace(Namespace.of("a", "b", "c", "d"))); } + + @Test + void testFilteredSecurableObjects() { + + List securableObjects = Lists.newArrayList(); + + SecurableObject metalakeObject = + SecurableObjects.ofMetalake("metalake", Lists.newArrayList(Privileges.SelectTable.allow())); + 
securableObjects.add(metalakeObject); + + SecurableObject catalog1Object = + SecurableObjects.ofCatalog("catalog1", Lists.newArrayList(Privileges.SelectTable.allow())); + securableObjects.add(catalog1Object); + + SecurableObject catalog2Object = + SecurableObjects.ofCatalog("catalog2", Lists.newArrayList(Privileges.SelectTable.allow())); + securableObjects.add(catalog2Object); + + SecurableObject schema1Object = + SecurableObjects.ofSchema( + catalog1Object, "schema1", Lists.newArrayList(Privileges.SelectTable.allow())); + SecurableObject table1Object = + SecurableObjects.ofTable( + schema1Object, "table1", Lists.newArrayList(Privileges.SelectTable.allow())); + securableObjects.add(table1Object); + securableObjects.add(schema1Object); + + SecurableObject schema2Object = + SecurableObjects.ofSchema( + catalog2Object, "schema2", Lists.newArrayList(Privileges.SelectTable.allow())); + SecurableObject table2Object = + SecurableObjects.ofTable( + schema2Object, "table2", Lists.newArrayList(Privileges.SelectTable.allow())); + securableObjects.add(table2Object); + securableObjects.add(schema2Object); + + RoleEntity role = + RoleEntity.builder() + .withId(1L) + .withName("role") + .withSecurableObjects(securableObjects) + .withAuditInfo(AuditInfo.EMPTY) + .build(); + Role filteredRole = AuthorizationUtils.filterSecurableObjects(role, "metalake", "catalog1"); + Assertions.assertEquals(4, filteredRole.securableObjects().size()); + Assertions.assertTrue(filteredRole.securableObjects().contains(metalakeObject)); + Assertions.assertTrue(filteredRole.securableObjects().contains(catalog1Object)); + Assertions.assertTrue(filteredRole.securableObjects().contains(schema1Object)); + Assertions.assertTrue(filteredRole.securableObjects().contains(table1Object)); + + filteredRole = AuthorizationUtils.filterSecurableObjects(role, "metalake", "catalog2"); + Assertions.assertEquals(4, filteredRole.securableObjects().size()); + 
Assertions.assertTrue(filteredRole.securableObjects().contains(metalakeObject)); + Assertions.assertTrue(filteredRole.securableObjects().contains(catalog2Object)); + Assertions.assertTrue(filteredRole.securableObjects().contains(schema2Object)); + Assertions.assertTrue(filteredRole.securableObjects().contains(table2Object)); + } } diff --git a/core/src/test/java/org/apache/gravitino/catalog/TestModelOperationDispatcher.java b/core/src/test/java/org/apache/gravitino/catalog/TestModelOperationDispatcher.java new file mode 100644 index 00000000000..10bb85a1e11 --- /dev/null +++ b/core/src/test/java/org/apache/gravitino/catalog/TestModelOperationDispatcher.java @@ -0,0 +1,264 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.catalog; + +import static org.apache.gravitino.Configs.TREE_LOCK_CLEAN_INTERVAL; +import static org.apache.gravitino.Configs.TREE_LOCK_MAX_NODE_IN_MEMORY; +import static org.apache.gravitino.Configs.TREE_LOCK_MIN_NODE_IN_MEMORY; +import static org.apache.gravitino.StringIdentifier.ID_KEY; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Sets; +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.gravitino.Config; +import org.apache.gravitino.GravitinoEnv; +import org.apache.gravitino.NameIdentifier; +import org.apache.gravitino.exceptions.NoSuchModelException; +import org.apache.gravitino.exceptions.NoSuchModelVersionException; +import org.apache.gravitino.lock.LockManager; +import org.apache.gravitino.model.Model; +import org.apache.gravitino.model.ModelVersion; +import org.apache.gravitino.utils.NameIdentifierUtil; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class TestModelOperationDispatcher extends TestOperationDispatcher { + + static ModelOperationDispatcher modelOperationDispatcher; + + static SchemaOperationDispatcher schemaOperationDispatcher; + + @BeforeAll + public static void initialize() throws IOException, IllegalAccessException { + Config config = Mockito.mock(Config.class); + Mockito.doReturn(100000L).when(config).get(TREE_LOCK_MAX_NODE_IN_MEMORY); + Mockito.doReturn(1000L).when(config).get(TREE_LOCK_MIN_NODE_IN_MEMORY); + Mockito.doReturn(36000L).when(config).get(TREE_LOCK_CLEAN_INTERVAL); + FieldUtils.writeField(GravitinoEnv.getInstance(), "lockManager", new LockManager(config), true); + + modelOperationDispatcher = + new ModelOperationDispatcher(catalogManager, entityStore, idGenerator); + 
schemaOperationDispatcher = + new SchemaOperationDispatcher(catalogManager, entityStore, idGenerator); + } + + @Test + public void testRegisterAndGetModel() { + String schemaName = randomSchemaName(); + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, schemaName); + schemaOperationDispatcher.createSchema(schemaIdent, "comment", null); + + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + String modelName = randomModelName(); + NameIdentifier modelIdent = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, modelName); + + Model model = modelOperationDispatcher.registerModel(modelIdent, "comment", props); + Assertions.assertEquals(modelName, model.name()); + Assertions.assertEquals("comment", model.comment()); + Assertions.assertEquals(props, model.properties()); + Assertions.assertFalse(model.properties().containsKey(ID_KEY)); + + Model registeredModel = modelOperationDispatcher.getModel(modelIdent); + Assertions.assertEquals(modelName, registeredModel.name()); + Assertions.assertEquals("comment", registeredModel.comment()); + Assertions.assertEquals(props, registeredModel.properties()); + Assertions.assertFalse(registeredModel.properties().containsKey(ID_KEY)); + + // Test register model with illegal property + Map illegalProps = ImmutableMap.of("k1", "v1", ID_KEY, "test"); + testPropertyException( + () -> modelOperationDispatcher.registerModel(modelIdent, "comment", illegalProps), + "Properties are reserved and cannot be set", + ID_KEY); + } + + @Test + public void testRegisterAndListModels() { + String schemaName = randomSchemaName(); + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, schemaName); + schemaOperationDispatcher.createSchema(schemaIdent, "comment", null); + + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + String modelName1 = randomModelName(); + NameIdentifier modelIdent1 = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, modelName1); + 
modelOperationDispatcher.registerModel(modelIdent1, "comment", props); + + String modelName2 = randomModelName(); + NameIdentifier modelIdent2 = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, modelName2); + modelOperationDispatcher.registerModel(modelIdent2, "comment", props); + + NameIdentifier[] modelIdents = modelOperationDispatcher.listModels(modelIdent1.namespace()); + Assertions.assertEquals(2, modelIdents.length); + Set modelIdentSet = Sets.newHashSet(modelIdents); + Assertions.assertTrue(modelIdentSet.contains(modelIdent1)); + Assertions.assertTrue(modelIdentSet.contains(modelIdent2)); + } + + @Test + public void testRegisterAndDeleteModel() { + String schemaName = randomSchemaName(); + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, schemaName); + schemaOperationDispatcher.createSchema(schemaIdent, "comment", null); + + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + String modelName = randomModelName(); + NameIdentifier modelIdent = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, modelName); + + modelOperationDispatcher.registerModel(modelIdent, "comment", props); + Assertions.assertTrue(modelOperationDispatcher.deleteModel(modelIdent)); + Assertions.assertFalse(modelOperationDispatcher.deleteModel(modelIdent)); + Assertions.assertThrows( + NoSuchModelException.class, () -> modelOperationDispatcher.getModel(modelIdent)); + + // Test delete in-existent model + Assertions.assertFalse( + modelOperationDispatcher.deleteModel(NameIdentifier.of(metalake, catalog, "inexistent"))); + } + + @Test + public void testLinkAndGetModelVersion() { + String schemaName = randomSchemaName(); + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, schemaName); + schemaOperationDispatcher.createSchema(schemaIdent, "comment", null); + + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + String modelName = randomModelName(); + NameIdentifier modelIdent = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, 
modelName); + + Model model = modelOperationDispatcher.registerModel(modelIdent, "comment", props); + Assertions.assertEquals(0, model.latestVersion()); + + String[] aliases = new String[] {"alias1", "alias2"}; + modelOperationDispatcher.linkModelVersion(modelIdent, "path", aliases, "comment", props); + + ModelVersion linkedModelVersion = modelOperationDispatcher.getModelVersion(modelIdent, 0); + Assertions.assertEquals(0, linkedModelVersion.version()); + Assertions.assertEquals("path", linkedModelVersion.uri()); + Assertions.assertArrayEquals(aliases, linkedModelVersion.aliases()); + Assertions.assertEquals("comment", linkedModelVersion.comment()); + Assertions.assertEquals(props, linkedModelVersion.properties()); + Assertions.assertFalse(linkedModelVersion.properties().containsKey(ID_KEY)); + + // Test get model version with alias + ModelVersion linkedModelVersionWithAlias = + modelOperationDispatcher.getModelVersion(modelIdent, "alias1"); + Assertions.assertEquals(0, linkedModelVersionWithAlias.version()); + Assertions.assertEquals("path", linkedModelVersionWithAlias.uri()); + Assertions.assertArrayEquals(aliases, linkedModelVersionWithAlias.aliases()); + Assertions.assertFalse(linkedModelVersionWithAlias.properties().containsKey(ID_KEY)); + + ModelVersion linkedModelVersionWithAlias2 = + modelOperationDispatcher.getModelVersion(modelIdent, "alias2"); + Assertions.assertEquals(0, linkedModelVersionWithAlias2.version()); + Assertions.assertEquals("path", linkedModelVersionWithAlias2.uri()); + Assertions.assertArrayEquals(aliases, linkedModelVersionWithAlias2.aliases()); + Assertions.assertFalse(linkedModelVersionWithAlias2.properties().containsKey(ID_KEY)); + + // Test Link model version with illegal property + Map illegalProps = ImmutableMap.of("k1", "v1", ID_KEY, "test"); + testPropertyException( + () -> + modelOperationDispatcher.linkModelVersion( + modelIdent, "path", aliases, "comment", illegalProps), + "Properties are reserved and cannot be set", + ID_KEY); 
+ } + + @Test + public void testLinkAndListModelVersion() { + String schemaName = randomSchemaName(); + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, schemaName); + schemaOperationDispatcher.createSchema(schemaIdent, "comment", null); + + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + String modelName = randomModelName(); + NameIdentifier modelIdent = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, modelName); + + Model model = modelOperationDispatcher.registerModel(modelIdent, "comment", props); + Assertions.assertEquals(0, model.latestVersion()); + + String[] aliases1 = new String[] {"alias1"}; + String[] aliases2 = new String[] {"alias2"}; + modelOperationDispatcher.linkModelVersion(modelIdent, "path1", aliases1, "comment", props); + modelOperationDispatcher.linkModelVersion(modelIdent, "path2", aliases2, "comment", props); + + int[] versions = modelOperationDispatcher.listModelVersions(modelIdent); + Assertions.assertEquals(2, versions.length); + Set versionSet = Arrays.stream(versions).boxed().collect(Collectors.toSet()); + Assertions.assertTrue(versionSet.contains(0)); + Assertions.assertTrue(versionSet.contains(1)); + } + + @Test + public void testLinkAndDeleteModelVersion() { + String schemaName = randomSchemaName(); + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, schemaName); + schemaOperationDispatcher.createSchema(schemaIdent, "comment", null); + + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + String modelName = randomModelName(); + NameIdentifier modelIdent = + NameIdentifierUtil.ofModel(metalake, catalog, schemaName, modelName); + + Model model = modelOperationDispatcher.registerModel(modelIdent, "comment", props); + Assertions.assertEquals(0, model.latestVersion()); + + String[] aliases = new String[] {"alias1"}; + modelOperationDispatcher.linkModelVersion(modelIdent, "path", aliases, "comment", props); + Assertions.assertTrue(modelOperationDispatcher.deleteModelVersion(modelIdent, 
0)); + Assertions.assertFalse(modelOperationDispatcher.deleteModelVersion(modelIdent, 0)); + Assertions.assertThrows( + NoSuchModelVersionException.class, + () -> modelOperationDispatcher.getModelVersion(modelIdent, 0)); + + // Test delete in-existent model version + Assertions.assertFalse(modelOperationDispatcher.deleteModelVersion(modelIdent, 1)); + + // Test delete model version with alias + String[] aliases2 = new String[] {"alias2"}; + modelOperationDispatcher.linkModelVersion(modelIdent, "path2", aliases2, "comment", props); + Assertions.assertTrue(modelOperationDispatcher.deleteModelVersion(modelIdent, "alias2")); + Assertions.assertFalse(modelOperationDispatcher.deleteModelVersion(modelIdent, "alias2")); + Assertions.assertThrows( + NoSuchModelVersionException.class, + () -> modelOperationDispatcher.getModelVersion(modelIdent, "alias2")); + } + + private String randomSchemaName() { + return "schema_" + UUID.randomUUID().toString().replace("-", ""); + } + + private String randomModelName() { + return "model_" + UUID.randomUUID().toString().replace("-", ""); + } +} diff --git a/core/src/test/java/org/apache/gravitino/connector/TestCatalogOperations.java b/core/src/test/java/org/apache/gravitino/connector/TestCatalogOperations.java index 4fb98c596b8..f7775ef32e7 100644 --- a/core/src/test/java/org/apache/gravitino/connector/TestCatalogOperations.java +++ b/core/src/test/java/org/apache/gravitino/connector/TestCatalogOperations.java @@ -26,11 +26,13 @@ import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; import org.apache.gravitino.Catalog; import org.apache.gravitino.NameIdentifier; import org.apache.gravitino.Namespace; @@ -38,6 +40,8 @@ import org.apache.gravitino.SchemaChange; import 
org.apache.gravitino.TestColumn; import org.apache.gravitino.TestFileset; +import org.apache.gravitino.TestModel; +import org.apache.gravitino.TestModelVersion; import org.apache.gravitino.TestSchema; import org.apache.gravitino.TestTable; import org.apache.gravitino.TestTopic; @@ -47,8 +51,12 @@ import org.apache.gravitino.exceptions.ConnectionFailedException; import org.apache.gravitino.exceptions.FilesetAlreadyExistsException; import org.apache.gravitino.exceptions.GravitinoRuntimeException; +import org.apache.gravitino.exceptions.ModelAlreadyExistsException; +import org.apache.gravitino.exceptions.ModelVersionAliasesAlreadyExistException; import org.apache.gravitino.exceptions.NoSuchCatalogException; import org.apache.gravitino.exceptions.NoSuchFilesetException; +import org.apache.gravitino.exceptions.NoSuchModelException; +import org.apache.gravitino.exceptions.NoSuchModelVersionException; import org.apache.gravitino.exceptions.NoSuchSchemaException; import org.apache.gravitino.exceptions.NoSuchTableException; import org.apache.gravitino.exceptions.NoSuchTopicException; @@ -64,6 +72,9 @@ import org.apache.gravitino.messaging.TopicCatalog; import org.apache.gravitino.messaging.TopicChange; import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.model.Model; +import org.apache.gravitino.model.ModelCatalog; +import org.apache.gravitino.model.ModelVersion; import org.apache.gravitino.rel.Column; import org.apache.gravitino.rel.Table; import org.apache.gravitino.rel.TableCatalog; @@ -76,7 +87,12 @@ import org.slf4j.LoggerFactory; public class TestCatalogOperations - implements CatalogOperations, TableCatalog, FilesetCatalog, TopicCatalog, SupportsSchemas { + implements CatalogOperations, + TableCatalog, + FilesetCatalog, + TopicCatalog, + ModelCatalog, + SupportsSchemas { private static final Logger LOG = LoggerFactory.getLogger(TestCatalogOperations.class); private final Map tables; @@ -87,6 +103,12 @@ public class TestCatalogOperations private 
final Map topics; + private final Map models; + + private final Map, TestModelVersion> modelVersions; + + private final Map, Integer> modelAliasToVersion; + public static final String FAIL_CREATE = "fail-create"; public static final String FAIL_TEST = "need-fail"; @@ -98,6 +120,9 @@ public TestCatalogOperations(Map config) { schemas = Maps.newHashMap(); filesets = Maps.newHashMap(); topics = Maps.newHashMap(); + models = Maps.newHashMap(); + modelVersions = Maps.newHashMap(); + modelAliasToVersion = Maps.newHashMap(); } @Override @@ -649,6 +674,227 @@ public void testConnection( } } + @Override + public NameIdentifier[] listModels(Namespace namespace) throws NoSuchSchemaException { + NameIdentifier modelSchemaIdent = NameIdentifier.of(namespace.levels()); + if (!schemas.containsKey(modelSchemaIdent)) { + throw new NoSuchSchemaException("Schema %s does not exist", modelSchemaIdent); + } + + return models.keySet().stream() + .filter(ident -> ident.namespace().equals(namespace)) + .toArray(NameIdentifier[]::new); + } + + @Override + public Model getModel(NameIdentifier ident) throws NoSuchModelException { + if (models.containsKey(ident)) { + return models.get(ident); + } else { + throw new NoSuchModelException("Model %s does not exist", ident); + } + } + + @Override + public Model registerModel(NameIdentifier ident, String comment, Map properties) + throws NoSuchSchemaException, ModelAlreadyExistsException { + NameIdentifier schemaIdent = NameIdentifier.of(ident.namespace().levels()); + if (!schemas.containsKey(schemaIdent)) { + throw new NoSuchSchemaException("Schema %s does not exist", schemaIdent); + } + + AuditInfo auditInfo = + AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build(); + TestModel model = + TestModel.builder() + .withName(ident.name()) + .withComment(comment) + .withProperties(properties) + .withLatestVersion(0) + .withAuditInfo(auditInfo) + .build(); + + if (models.containsKey(ident)) { + throw new 
ModelAlreadyExistsException("Model %s already exists", ident); + } else { + models.put(ident, model); + } + + return model; + } + + @Override + public boolean deleteModel(NameIdentifier ident) { + if (!models.containsKey(ident)) { + return false; + } + + models.remove(ident); + + List> deletedVersions = + modelVersions.entrySet().stream() + .filter(e -> e.getKey().getLeft().equals(ident)) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); + deletedVersions.forEach(modelVersions::remove); + + List> deletedAliases = + modelAliasToVersion.entrySet().stream() + .filter(e -> e.getKey().getLeft().equals(ident)) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); + deletedAliases.forEach(modelAliasToVersion::remove); + + return true; + } + + @Override + public int[] listModelVersions(NameIdentifier ident) throws NoSuchModelException { + if (!models.containsKey(ident)) { + throw new NoSuchModelException("Model %s does not exist", ident); + } + + return modelVersions.entrySet().stream() + .filter(e -> e.getKey().getLeft().equals(ident)) + .mapToInt(e -> e.getValue().version()) + .toArray(); + } + + @Override + public ModelVersion getModelVersion(NameIdentifier ident, int version) + throws NoSuchModelVersionException { + if (!models.containsKey(ident)) { + throw new NoSuchModelVersionException("Model %s does not exist", ident); + } + + Pair versionPair = Pair.of(ident, version); + if (!modelVersions.containsKey(versionPair)) { + throw new NoSuchModelVersionException("Model version %s does not exist", versionPair); + } + + return modelVersions.get(versionPair); + } + + @Override + public ModelVersion getModelVersion(NameIdentifier ident, String alias) + throws NoSuchModelVersionException { + if (!models.containsKey(ident)) { + throw new NoSuchModelVersionException("Model %s does not exist", ident); + } + + Pair aliasPair = Pair.of(ident, alias); + if (!modelAliasToVersion.containsKey(aliasPair)) { + throw new NoSuchModelVersionException("Model version %s does 
not exist", alias); + } + + int version = modelAliasToVersion.get(aliasPair); + Pair versionPair = Pair.of(ident, version); + if (!modelVersions.containsKey(versionPair)) { + throw new NoSuchModelVersionException("Model version %s does not exist", versionPair); + } + + return modelVersions.get(versionPair); + } + + @Override + public void linkModelVersion( + NameIdentifier ident, + String uri, + String[] aliases, + String comment, + Map properties) + throws NoSuchModelException, ModelVersionAliasesAlreadyExistException { + if (!models.containsKey(ident)) { + throw new NoSuchModelException("Model %s does not exist", ident); + } + + String[] aliasArray = aliases != null ? aliases : new String[0]; + for (String alias : aliasArray) { + Pair aliasPair = Pair.of(ident, alias); + if (modelAliasToVersion.containsKey(aliasPair)) { + throw new ModelVersionAliasesAlreadyExistException( + "Model version alias %s already exists", alias); + } + } + + int version = models.get(ident).latestVersion(); + TestModelVersion modelVersion = + TestModelVersion.builder() + .withVersion(version) + .withAliases(aliases) + .withComment(comment) + .withUri(uri) + .withProperties(properties) + .withAuditInfo( + AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build()) + .build(); + Pair versionPair = Pair.of(ident, version); + modelVersions.put(versionPair, modelVersion); + for (String alias : aliasArray) { + Pair aliasPair = Pair.of(ident, alias); + modelAliasToVersion.put(aliasPair, version); + } + + TestModel model = models.get(ident); + TestModel updatedModel = + TestModel.builder() + .withName(model.name()) + .withComment(model.comment()) + .withProperties(model.properties()) + .withLatestVersion(version + 1) + .withAuditInfo(model.auditInfo()) + .build(); + models.put(ident, updatedModel); + } + + @Override + public boolean deleteModelVersion(NameIdentifier ident, int version) { + if (!models.containsKey(ident)) { + return false; + } + + Pair versionPair = 
Pair.of(ident, version); + if (!modelVersions.containsKey(versionPair)) { + return false; + } + + TestModelVersion modelVersion = modelVersions.remove(versionPair); + if (modelVersion.aliases() != null) { + for (String alias : modelVersion.aliases()) { + Pair aliasPair = Pair.of(ident, alias); + modelAliasToVersion.remove(aliasPair); + } + } + + return true; + } + + @Override + public boolean deleteModelVersion(NameIdentifier ident, String alias) { + if (!models.containsKey(ident)) { + return false; + } + + Pair aliasPair = Pair.of(ident, alias); + if (!modelAliasToVersion.containsKey(aliasPair)) { + return false; + } + + int version = modelAliasToVersion.remove(aliasPair); + Pair versionPair = Pair.of(ident, version); + if (!modelVersions.containsKey(versionPair)) { + return false; + } + + TestModelVersion modelVersion = modelVersions.remove(versionPair); + for (String modelVersionAlias : modelVersion.aliases()) { + Pair modelAliasPair = Pair.of(ident, modelVersionAlias); + modelAliasToVersion.remove(modelAliasPair); + } + + return true; + } + private boolean hasCallerContext() { return CallerContext.CallerContextHolder.get() != null && CallerContext.CallerContextHolder.get().context() != null diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java index db7c629bbd5..e8d747da11f 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java @@ -32,7 +32,7 @@ public String shortName() { } @Override - protected AuthorizationPlugin newPlugin( + public AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map config) { return new TestMySQLAuthorizationPlugin(); } diff --git 
a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java index 383339d0847..9df9a8d63b7 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java @@ -32,7 +32,7 @@ public String shortName() { } @Override - protected AuthorizationPlugin newPlugin( + public AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map config) { return new TestRangerAuthorizationPlugin(); } diff --git a/docs/cli.md b/docs/cli.md index e6e2f5aa609..64d720f2e8a 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -276,6 +276,24 @@ gcli metalake set --property test --value value gcli metalake remove --property test ``` +#### Enable a metalake + +```bash +gcli metalake update -m metalake_demo --enable +``` + +#### Enable a metalake and all catalogs + +```bash +gcli metalake update -m metalake_demo --enable --all +``` + +#### Disable a metalake + +```bash +gcli metalake update -m metalake_demo --disable +``` + ### Catalog commands #### Show all catalogs in a metalake @@ -390,6 +408,24 @@ gcli catalog set --name catalog_mysql --property test --value value gcli catalog remove --name catalog_mysql --property test ``` +#### Enable a catalog + +```bash +gcli catalog update -m metalake_demo --name catalog --enable +``` + +#### Enable a catalog and its metalake + +```bash +gcli catalog update -m metalake_demo --name catalog --enable --all +``` + +#### Disable a catalog + +```bash +gcli catalog update -m metalake_demo --name catalog --disable +``` + ### Schema commands #### Show all schemas in a catalog diff --git a/docs/how-to-use-the-playground.md b/docs/how-to-use-the-playground.md index 390e7a37be4..d65d40acdd2 100644 --- a/docs/how-to-use-the-playground.md +++ 
b/docs/how-to-use-the-playground.md @@ -14,7 +14,6 @@ Depending on your network and computer, startup time may take 3-5 minutes. Once ## Prerequisites Install Git (optional), Docker, Docker Compose. -Docker Desktop (or Orbstack) with Kubernetes enabled and helm CLI is required if you use helm-chart to deploy services. ## System Resource Requirements @@ -50,82 +49,22 @@ git clone git@github.com:apache/gravitino-playground.git cd gravitino-playground ``` -#### Docker - -##### Start - -``` -./playground.sh docker start -``` - -##### Check status - -```shell -./playground.sh docker status -``` - -##### Stop playground - -```shell -./playground.sh docker stop -``` - -#### Kubernetes - -Enable Kubernetes in Docker Desktop or Orbstack. - -In the project root directory, execute this command: - -``` -helm upgrade --install gravitino-playground ./helm-chart/ --create-namespace --namespace gravitino-playground --set projectRoot=$(pwd) -``` - -##### Start +#### Start ``` -./playground.sh k8s start +./playground.sh start ``` -##### Check status +#### Check status ```shell -./playground.sh k8s status -``` - -##### Port Forwarding - -To access pods or services at `localhost`, you need to do these steps: - -1. Log in to the Gravitino playground Trino pod using the following command: - -``` -TRINO_POD=$(kubectl get pods --namespace gravitino-playground -l app=trino -o jsonpath="{.items[0].metadata.name}") -kubectl exec $TRINO_POD -n gravitino-playground -it -- /bin/bash -``` - -2. Log in to the Gravitino playground Spark pod using the following command: - -``` -SPARK_POD=$(kubectl get pods --namespace gravitino-playground -l app=spark -o jsonpath="{.items[0].metadata.name}") -kubectl exec $SPARK_POD -n gravitino-playground -it -- /bin/bash -``` - -3. Port-forward the Gravitino service to access it at `localhost:8090`. - -``` -kubectl port-forward svc/gravitino -n gravitino-playground 8090:8090 -``` - -4. Port-forward the Jupyter Notebook service to access it at `localhost:8888`. 
- -``` -kubectl port-forward svc/jupyternotebook -n gravitino-playground 8888:8888 +./playground.sh status ``` -##### Stop playground +#### Stop playground ```shell -./playground.sh k8s stop +./playground.sh stop ``` ## Experiencing Apache Gravitino with Trino SQL diff --git a/docs/iceberg-rest-service.md b/docs/iceberg-rest-service.md index 8d9d49745c2..f31aa13685a 100644 --- a/docs/iceberg-rest-service.md +++ b/docs/iceberg-rest-service.md @@ -106,18 +106,18 @@ The detailed configuration items are as follows: Gravitino Iceberg REST service supports using static S3 secret key or generating temporary token to access S3 data. -| Configuration item | Description | Default value | Required | Since Version | -|---------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|----------------------------------------------------|------------------| -| `gravitino.iceberg-rest.io-impl` | The IO implementation for `FileIO` in Iceberg, use `org.apache.iceberg.aws.s3.S3FileIO` for S3. | (none) | No | 0.6.0-incubating | -| `gravitino.iceberg-rest.credential-provider-type` | Supports `s3-token` and `s3-secret-key` for S3. `s3-token` generates a temporary token according to the query data path while `s3-secret-key` using the s3 secret access key to access S3 data. | (none) | No | 0.7.0-incubating | -| `gravitino.iceberg-rest.s3-access-key-id` | The static access key ID used to access S3 data. | (none) | No | 0.6.0-incubating | -| `gravitino.iceberg-rest.s3-secret-access-key` | The static secret access key used to access S3 data. 
| (none) | No | 0.6.0-incubating | -| `gravitino.iceberg-rest.s3-endpoint` | An alternative endpoint of the S3 service, This could be used for S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | (none) | No | 0.6.0-incubating | -| `gravitino.iceberg-rest.s3-region` | The region of the S3 service, like `us-west-2`. | (none) | No | 0.6.0-incubating | -| `gravitino.iceberg-rest.s3-role-arn` | The ARN of the role to access the S3 data. | (none) | Yes, when `credential-provider-type` is `s3-token` | 0.7.0-incubating | -| `gravitino.iceberg-rest.s3-external-id` | The S3 external id to generate token, only used when `credential-provider-type` is `s3-token`. | (none) | No | 0.7.0-incubating | -| `gravitino.iceberg-rest.s3-token-expire-in-secs` | The S3 session token expire time in secs, it couldn't exceed the max session time of the assumed role, only used when `credential-provider-type` is `s3-token`. | 3600 | No | 0.7.0-incubating | -| `gravitino.iceberg-rest.s3-token-service-endpoint` | An alternative endpoint of the S3 token service, This could be used with s3-compatible object storage service like MINIO that has a different STS endpoint. | (none) | No | 0.8.0-incubating | +| Configuration item | Description | Default value | Required | Since Version | +|----------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|----------------------------------------------------|------------------| +| `gravitino.iceberg-rest.io-impl` | The IO implementation for `FileIO` in Iceberg, use `org.apache.iceberg.aws.s3.S3FileIO` for S3. | (none) | No | 0.6.0-incubating | +| `gravitino.iceberg-rest.credential-provider-type` | Supports `s3-token` and `s3-secret-key` for S3. 
`s3-token` generates a temporary token according to the query data path while `s3-secret-key` using the s3 secret access key to access S3 data. | (none) | No | 0.7.0-incubating | +| `gravitino.iceberg-rest.s3-access-key-id` | The static access key ID used to access S3 data. | (none) | No | 0.6.0-incubating | +| `gravitino.iceberg-rest.s3-secret-access-key` | The static secret access key used to access S3 data. | (none) | No | 0.6.0-incubating | +| `gravitino.iceberg-rest.s3-endpoint` | An alternative endpoint of the S3 service, This could be used for S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | (none) | No | 0.6.0-incubating | +| `gravitino.iceberg-rest.s3-region` | The region of the S3 service, like `us-west-2`. | (none) | No | 0.6.0-incubating | +| `gravitino.iceberg-rest.s3-role-arn` | The ARN of the role to access the S3 data. | (none) | Yes, when `credential-provider-type` is `s3-token` | 0.7.0-incubating | +| `gravitino.iceberg-rest.s3-external-id` | The S3 external id to generate token, only used when `credential-provider-type` is `s3-token`. | (none) | No | 0.7.0-incubating | +| `gravitino.iceberg-rest.s3-token-expire-in-secs` | The S3 session token expire time in secs, it couldn't exceed the max session time of the assumed role, only used when `credential-provider-type` is `s3-token`. | 3600 | No | 0.7.0-incubating | +| `gravitino.iceberg-rest.s3-token-service-endpoint` | An alternative endpoint of the S3 token service, This could be used with s3-compatible object storage service like MINIO that has a different STS endpoint. | (none) | No | 0.8.0-incubating | For other Iceberg s3 properties not managed by Gravitino like `s3.sse.type`, you could config it directly by `gravitino.iceberg-rest.s3.sse.type`. 
@@ -175,15 +175,16 @@ Please set `gravitino.iceberg-rest.warehouse` to `gs://{bucket_name}/${prefix_na Gravitino Iceberg REST service supports generating SAS token to access ADLS data. -| Configuration item | Description | Default value | Required | Since Version | -|-----------------------------------------------------|-----------------------------------------------------------------------------------------------------------|---------------|----------|------------------| -| `gravitino.iceberg-rest.io-impl` | The IO implementation for `FileIO` in Iceberg, use `org.apache.iceberg.azure.adlsv2.ADLSFileIO` for ADLS. | (none) | Yes | 0.8.0-incubating | -| `gravitino.iceberg-rest.credential-provider-type` | Supports `adls-token`, generates a temporary token according to the query data path. | (none) | Yes | 0.8.0-incubating | -| `gravitino.iceberg-rest.azure-storage-account-name` | The static storage account name used to access ADLS data. | (none) | Yes | 0.8.0-incubating | -| `gravitino.iceberg-rest.azure-storage-account-key` | The static storage account key used to access ADLS data. | (none) | Yes | 0.8.0-incubating | -| `gravitino.iceberg-rest.azure-tenant-id` | Azure Active Directory (AAD) tenant ID. | (none) | Yes | 0.8.0-incubating | -| `gravitino.iceberg-rest.azure-client-id` | Azure Active Directory (AAD) client ID used for authentication. | (none) | Yes | 0.8.0-incubating | -| `gravitino.iceberg-rest.azure-client-secret` | Azure Active Directory (AAD) client secret used for authentication. 
| (none) | Yes | 0.8.0-incubating | +| Configuration item | Description | Default value | Required | Since Version | +|-----------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|----------|------------------| +| `gravitino.iceberg-rest.io-impl` | The IO implementation for `FileIO` in Iceberg, use `org.apache.iceberg.azure.adlsv2.ADLSFileIO` for ADLS. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.credential-provider-type` | Supports `adls-token` and `azure-account-key`. `adls-token` generates a temporary token according to the query data path while `azure-account-key` uses a storage account key to access ADLS data. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.azure-storage-account-name` | The static storage account name used to access ADLS data. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.azure-storage-account-key` | The static storage account key used to access ADLS data. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.azure-tenant-id` | Azure Active Directory (AAD) tenant ID, only used when `credential-provider-type` is `adls-token`. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.azure-client-id` | Azure Active Directory (AAD) client ID used for authentication, only used when `credential-provider-type` is `adls-token`. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.azure-client-secret` | Azure Active Directory (AAD) client secret used for authentication, only used when `credential-provider-type` is `adls-token`. | (none) | Yes | 0.8.0-incubating | +| `gravitino.iceberg-rest.adls-token-expire-in-secs` | The ADLS SAS token expire time in secs, only used when `credential-provider-type` is `adls-token`. 
| 3600 | No | 0.8.0-incubating | For other Iceberg ADLS properties not managed by Gravitino like `adls.read.block-size-bytes`, you could config it directly by `gravitino.iceberg-rest.adls.read.block-size-bytes`. diff --git a/docs/lakehouse-iceberg-catalog.md b/docs/lakehouse-iceberg-catalog.md index 393ef26b8cf..6ad011d7160 100644 --- a/docs/lakehouse-iceberg-catalog.md +++ b/docs/lakehouse-iceberg-catalog.md @@ -28,10 +28,7 @@ Builds with Apache Iceberg `1.5.2`. The Apache Iceberg table format version is ` - Works as a catalog proxy, supporting `Hive`, `JDBC` and `REST` as catalog backend. - Supports DDL operations for Iceberg schemas and tables. - Doesn't support snapshot or table management operations. -- Supports multi storage. - - S3 - - HDFS - - OSS +- Supports multi storage, including S3, GCS, ADLS, OSS and HDFS. - Supports Kerberos or simple authentication for Iceberg catalog with Hive backend. ### Catalog properties @@ -119,6 +116,22 @@ Please make sure the credential file is accessible by Gravitino, like using `exp Please set `warehouse` to `gs://{bucket_name}/${prefix_name}`, and download [Iceberg GCP bundle jar](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-gcp-bundle) and place it to `catalogs/lakehouse-iceberg/libs/`. ::: +#### ADLS + +Supports using Azure account name and secret key to access ADLS data. + +| Configuration item | Description | Default value | Required | Since Version | +|------------------------------|-----------------------------------------------------------------------------------------------------------|---------------|----------|------------------| +| `io-impl` | The io implementation for `FileIO` in Iceberg, use `org.apache.iceberg.azure.adlsv2.ADLSFileIO` for ADLS. | (none) | No | 0.6.0-incubating | +| `azure-storage-account-name` | The static storage account name used to access ADLS data. | (none) | No | 0.8.0-incubating | +| `azure-storage-account-key` | The static storage account key used to access ADLS data. 
| (none) | No | 0.8.0-incubating | + +For other Iceberg ADLS properties not managed by Gravitino like `adls.read.block-size-bytes`, you could config it directly by `gravitino.iceberg-rest.adls.read.block-size-bytes`. + +:::info +Please set `warehouse` to `abfs[s]://{container-name}@{storage-account-name}.dfs.core.windows.net/{path}`, and download the [Iceberg Azure bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-azure-bundle) and place it to `catalogs/lakehouse-iceberg/libs/`. +::: + #### Other storages For other storages that are not managed by Gravitino directly, you can manage them through custom catalog properties. diff --git a/docs/open-api/openapi.yaml b/docs/open-api/openapi.yaml index 0985a60eddb..dd0564a7f9c 100644 --- a/docs/open-api/openapi.yaml +++ b/docs/open-api/openapi.yaml @@ -469,6 +469,7 @@ components: schema: type: string enum: + - "METALAKE" - "CATALOG" - "SCHEMA" - "TABLE" @@ -476,8 +477,6 @@ components: - "FILESET" - "TOPIC" - "ROLE" - - "METALAKE" - metadataObjectFullName: name: metadataObjectFullName in: path diff --git a/docs/open-api/owners.yaml b/docs/open-api/owners.yaml index c0c6b8173f3..0ef0d4e9f01 100644 --- a/docs/open-api/owners.yaml +++ b/docs/open-api/owners.yaml @@ -22,7 +22,7 @@ paths: /metalakes/{metalake}/owners/{metadataObjectType}/{metadataObjectFullName}: parameters: - $ref: "./openapi.yaml#/components/parameters/metalake" - - $ref: "./openapi.yaml#/components/parameters/metadataObjectType" + - $ref: "#/components/parameters/metadataObjectTypeOfOwner" - $ref: "./openapi.yaml#/components/parameters/metadataObjectFullName" put: @@ -171,4 +171,21 @@ components: "org.apache.gravitino.exceptions.NotFoundException: Metadata object or owner does not exist", "..." 
] - } \ No newline at end of file + } + + parameters: + metadataObjectTypeOfOwner: + name: metadataObjectType + in: path + description: The type of the metadata object + required: true + schema: + type: string + enum: + - "METALAKE" + - "CATALOG" + - "SCHEMA" + - "TABLE" + - "FILESET" + - "TOPIC" + - "ROLE" \ No newline at end of file diff --git a/docs/spark-connector/spark-catalog-iceberg.md b/docs/spark-connector/spark-catalog-iceberg.md index e4933a3036f..28f2b55c7e6 100644 --- a/docs/spark-connector/spark-catalog-iceberg.md +++ b/docs/spark-connector/spark-catalog-iceberg.md @@ -111,7 +111,13 @@ Gravitino spark connector will transform below property names which are defined | `io-impl` | `io-impl` | The io implementation for `FileIO` in Iceberg. | 0.6.0-incubating | | `s3-endpoint` | `s3.endpoint` | An alternative endpoint of the S3 service, This could be used for S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | 0.6.0-incubating | | `s3-region` | `client.region` | The region of the S3 service, like `us-west-2`. | 0.6.0-incubating | +| `s3-access-key-id` | `s3.access-key-id` | The static access key ID used to access S3 data. | 0.8.0-incubating | +| `s3-secret-access-key` | `s3.secret-access-key` | The static secret access key used to access S3 data. | 0.8.0-incubating | | `oss-endpoint` | `oss.endpoint` | The endpoint of Aliyun OSS service. | 0.7.0-incubating | +| `oss-access-key-id` | `client.access-key-id` | The static access key ID used to access OSS data. | 0.8.0-incubating | +| `oss-secret-access-key` | `client.access-key-secret` | The static secret access key used to access OSS data. | 0.8.0-incubating | +| `azure-storage-account-name` | `adls.auth.shared-key.account.name` | The static storage account name used to access ADLS data. 
| 0.8.0-incubating | +| `azure-storage-account-key` | `adls.auth.shared-key.account.key` | The static storage account key used to access ADLS data. | 0.8.0-incubating | Gravitino catalog property names with the prefix `spark.bypass.` are passed to Spark Iceberg connector. For example, using `spark.bypass.clients` to pass the `clients` to the Spark Iceberg connector. @@ -121,17 +127,23 @@ Iceberg catalog property `cache-enabled` is setting to `false` internally and no ## Storage +Spark connector can convert storage properties in the Gravitino catalog to Spark Iceberg connector automatically. No extra configuration is needed for `S3`, `ADLS`, `OSS`, `GCS`. + ### S3 -You need to add s3 secret to the Spark configuration using `spark.sql.catalog.${iceberg_catalog_name}.s3.access-key-id` and `spark.sql.catalog.${iceberg_catalog_name}.s3.secret-access-key`. Additionally, download the [Iceberg AWS bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-aws-bundle) and place it in the classpath of Spark. +Please download the [Iceberg AWS bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-aws-bundle) and place it in the classpath of Spark. ### OSS -You need to add OSS secret key to the Spark configuration using `spark.sql.catalog.${iceberg_catalog_name}.client.access-key-id` and `spark.sql.catalog.${iceberg_catalog_name}.client.access-key-secret`. Additionally, download the [Aliyun OSS SDK](https://gosspublic.alicdn.com/sdks/java/aliyun_java_sdk_3.10.2.zip) and copy `aliyun-sdk-oss-3.10.2.jar`, `hamcrest-core-1.1.jar`, `jdom2-2.0.6.jar` in the classpath of Spark. +Please download the [Aliyun OSS SDK](https://gosspublic.alicdn.com/sdks/java/aliyun_java_sdk_3.10.2.zip) and copy `aliyun-sdk-oss-3.10.2.jar`, `hamcrest-core-1.1.jar`, `jdom2-2.0.6.jar` in the classpath of Spark. ### GCS -No extra configuration is needed. 
Please make sure the credential file is accessible by Spark, like using `export GOOGLE_APPLICATION_CREDENTIALS=/xx/application_default_credentials.json`, and download [Iceberg GCP bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-gcp-bundle) and place it to the classpath of Spark. +Please make sure the credential file is accessible by Spark, like using `export GOOGLE_APPLICATION_CREDENTIALS=/xx/application_default_credentials.json`, and download [Iceberg GCP bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-gcp-bundle) and place it to the classpath of Spark. + +### ADLS + +Please download the [Iceberg Azure bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-azure-bundle) and place it in the classpath of Spark. ### Other storage diff --git a/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/IcebergConfig.java b/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/IcebergConfig.java index 2e7eb74e2f1..60a7491b854 100644 --- a/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/IcebergConfig.java +++ b/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/IcebergConfig.java @@ -65,7 +65,6 @@ public class IcebergConfig extends Config implements OverwriteDefaultConfig { .doc("Warehouse directory of catalog") .version(ConfigConstants.VERSION_0_2_0) .stringConf() - .checkValue(StringUtils::isNotBlank, ConfigConstants.NOT_BLANK_ERROR_MSG) .create(); public static final ConfigEntry CATALOG_URI = diff --git a/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java b/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java index 05c9ee2a1eb..0ed62b26f7f 100644 --- a/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java +++ 
b/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java @@ -29,6 +29,7 @@ import java.util.function.Supplier; import lombok.Getter; import lombok.Setter; +import org.apache.commons.lang3.StringUtils; import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants; import org.apache.gravitino.iceberg.common.IcebergCatalogBackend; import org.apache.gravitino.iceberg.common.IcebergConfig; @@ -82,9 +83,14 @@ public IcebergCatalogWrapper(IcebergConfig icebergConfig) { this.catalogBackend = IcebergCatalogBackend.valueOf( icebergConfig.get(IcebergConfig.CATALOG_BACKEND).toUpperCase(Locale.ROOT)); - if (!IcebergCatalogBackend.MEMORY.equals(catalogBackend)) { + if (!IcebergCatalogBackend.MEMORY.equals(catalogBackend) + && !IcebergCatalogBackend.REST.equals(catalogBackend)) { // check whether IcebergConfig.CATALOG_WAREHOUSE exists - icebergConfig.get(IcebergConfig.CATALOG_WAREHOUSE); + if (StringUtils.isBlank(icebergConfig.get(IcebergConfig.CATALOG_WAREHOUSE))) { + throw new IllegalArgumentException("The 'warehouse' parameter must have a value."); + } + } + if (!IcebergCatalogBackend.MEMORY.equals(catalogBackend)) { this.catalogUri = icebergConfig.get(IcebergConfig.CATALOG_URI); } this.catalog = IcebergCatalogUtil.loadCatalogBackend(catalogBackend, icebergConfig); diff --git a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSIT.java b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java similarity index 92% rename from iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSIT.java rename to iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java index 570298d050b..b16d504e1ea 100644 --- a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSIT.java 
+++ b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java @@ -36,7 +36,7 @@ @SuppressWarnings("FormatStringAnnotation") @EnabledIfEnvironmentVariable(named = "GRAVITINO_TEST_CLOUD_IT", matches = "true") -public class IcebergRESTADLSIT extends IcebergRESTJdbcCatalogIT { +public class IcebergRESTADLSTokenIT extends IcebergRESTJdbcCatalogIT { private String storageAccountName; private String storageAccountKey; @@ -49,13 +49,14 @@ public class IcebergRESTADLSIT extends IcebergRESTJdbcCatalogIT { void initEnv() { this.storageAccountName = System.getenv() - .getOrDefault("GRAVITINO_ADLS_STORAGE_ACCOUNT_NAME", "{STORAGE_ACCOUNT_NAME}"); + .getOrDefault("GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME", "{STORAGE_ACCOUNT_NAME}"); this.storageAccountKey = - System.getenv().getOrDefault("GRAVITINO_ADLS_STORAGE_ACCOUNT_KEY", "{STORAGE_ACCOUNT_KEY}"); - this.tenantId = System.getenv().getOrDefault("GRAVITINO_ADLS_TENANT_ID", "{TENANT_ID}"); - this.clientId = System.getenv().getOrDefault("GRAVITINO_ADLS_CLIENT_ID", "{CLIENT_ID}"); + System.getenv() + .getOrDefault("GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY", "{STORAGE_ACCOUNT_KEY}"); + this.tenantId = System.getenv().getOrDefault("GRAVITINO_AZURE_TENANT_ID", "{TENANT_ID}"); + this.clientId = System.getenv().getOrDefault("GRAVITINO_AZURE_CLIENT_ID", "{CLIENT_ID}"); this.clientSecret = - System.getenv().getOrDefault("GRAVITINO_ADLS_CLIENT_SECRET", "{CLIENT_SECRET}"); + System.getenv().getOrDefault("GRAVITINO_AZURE_CLIENT_SECRET", "{CLIENT_SECRET}"); this.warehousePath = String.format( "abfss://%s@%s.dfs.core.windows.net/data/test", diff --git a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java new file mode 100644 index 00000000000..42709162aaa --- /dev/null +++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.gravitino.iceberg.integration.test; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants; +import org.apache.gravitino.credential.CredentialConstants; +import org.apache.gravitino.iceberg.common.IcebergConfig; +import org.apache.gravitino.integration.test.util.BaseIT; +import org.apache.gravitino.integration.test.util.DownloaderUtils; +import org.apache.gravitino.integration.test.util.ITUtils; +import org.apache.gravitino.storage.AzureProperties; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@SuppressWarnings("FormatStringAnnotation") +@EnabledIfEnvironmentVariable(named = "GRAVITINO_TEST_CLOUD_IT", matches = "true") +public class IcebergRESTAzureAccountKeyIT extends IcebergRESTJdbcCatalogIT { + + private String storageAccountName; + private String storageAccountKey; + private String warehousePath; + + @Override + void initEnv() { + this.storageAccountName = + System.getenv() 
+ .getOrDefault("GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME", "{STORAGE_ACCOUNT_NAME}"); + this.storageAccountKey = + System.getenv() + .getOrDefault("GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY", "{STORAGE_ACCOUNT_KEY}"); + this.warehousePath = + String.format( + "abfss://%s@%s.dfs.core.windows.net/data/test", + System.getenv().getOrDefault("GRAVITINO_ADLS_CONTAINER", "{ADLS_CONTAINER}"), + storageAccountName); + + if (ITUtils.isEmbedded()) { + return; + } + try { + downloadIcebergAzureBundleJar(); + } catch (IOException e) { + LOG.warn("Download Iceberg Azure bundle jar failed,", e); + throw new RuntimeException(e); + } + copyAzureBundleJar(); + } + + @Override + public Map getCatalogConfig() { + HashMap m = new HashMap(); + m.putAll(getCatalogJdbcConfig()); + m.putAll(getADLSConfig()); + return m; + } + + public boolean supportsCredentialVending() { + return true; + } + + private Map getADLSConfig() { + Map configMap = new HashMap(); + + configMap.put( + IcebergConfig.ICEBERG_CONFIG_PREFIX + CredentialConstants.CREDENTIAL_PROVIDER_TYPE, + CredentialConstants.AZURE_ACCOUNT_KEY_CREDENTIAL_PROVIDER_TYPE); + configMap.put( + IcebergConfig.ICEBERG_CONFIG_PREFIX + AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, + storageAccountName); + configMap.put( + IcebergConfig.ICEBERG_CONFIG_PREFIX + AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, + storageAccountKey); + + configMap.put( + IcebergConfig.ICEBERG_CONFIG_PREFIX + IcebergConstants.IO_IMPL, + "org.apache.iceberg.azure.adlsv2.ADLSFileIO"); + configMap.put(IcebergConfig.ICEBERG_CONFIG_PREFIX + IcebergConstants.WAREHOUSE, warehousePath); + + return configMap; + } + + private void downloadIcebergAzureBundleJar() throws IOException { + String icebergBundleJarName = "iceberg-azure-bundle-1.5.2.jar"; + String icebergBundleJarUri = + "https://repo1.maven.org/maven2/org/apache/iceberg/" + + "iceberg-azure-bundle/1.5.2/" + + icebergBundleJarName; + String gravitinoHome = System.getenv("GRAVITINO_HOME"); + String targetDir = 
String.format("%s/iceberg-rest-server/libs/", gravitinoHome); + DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir); + } + + private void copyAzureBundleJar() { + String gravitinoHome = System.getenv("GRAVITINO_HOME"); + String targetDir = String.format("%s/iceberg-rest-server/libs/", gravitinoHome); + BaseIT.copyBundleJarsToDirectory("azure-bundle", targetDir); + } +} diff --git a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java index 5745cc6d08f..d2a5ee6152b 100644 --- a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java +++ b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/ContainerSuite.java @@ -86,22 +86,24 @@ public class ContainerSuite implements Closeable { protected static final CloseableGroup closer = CloseableGroup.create(); private static void initIfNecessary() { - if (initialized) { - return; - } - - try { - // Check if docker is available and you should never close the global DockerClient! - DockerClient dockerClient = DockerClientFactory.instance().client(); - Info info = dockerClient.infoCmd().exec(); - LOG.info("Docker info: {}", info); - - if ("true".equalsIgnoreCase(System.getenv("NEED_CREATE_DOCKER_NETWORK"))) { - network = createDockerNetwork(); + if (!initialized) { + synchronized (ContainerSuite.class) { + if (!initialized) { + try { + // Check if docker is available and you should never close the global DockerClient! 
+ DockerClient dockerClient = DockerClientFactory.instance().client(); + Info info = dockerClient.infoCmd().exec(); + LOG.info("Docker info: {}", info); + + if ("true".equalsIgnoreCase(System.getenv("NEED_CREATE_DOCKER_NETWORK"))) { + network = createDockerNetwork(); + } + initialized = true; + } catch (Exception e) { + throw new RuntimeException("Failed to initialize ContainerSuite", e); + } + } } - initialized = true; - } catch (Exception e) { - throw new RuntimeException("Failed to initialize ContainerSuite", e); } } diff --git a/settings.gradle.kts b/settings.gradle.kts index 150acdb00ce..b3eb56578aa 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -57,7 +57,7 @@ if (gradle.startParameter.projectProperties["enableFuse"]?.toBoolean() == true) } include("iceberg:iceberg-common") include("iceberg:iceberg-rest-server") -include("authorizations:authorization-ranger") +include("authorizations:authorization-ranger", "authorizations:authorization-jdbc") include("trino-connector:trino-connector", "trino-connector:integration-test") include("spark-connector:spark-common") // kyuubi hive connector doesn't support 2.13 for Spark3.3