
Commit

[test](vault) Add more regression tests for storage vault
* Add case-sensitive test
* Add Kerberos test
SWJTU-ZhangLei committed Jan 23, 2025
1 parent 72cffc5 commit 66a209c
Showing 7 changed files with 602 additions and 4 deletions.
@@ -30,6 +30,8 @@
 import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.qe.ConnectContext;
 
+import com.google.common.base.Strings;
+
 import java.util.Map;
 
 // CREATE [EXTERNAL] RESOURCE resource_name
@@ -69,8 +71,13 @@ public ResourceType getResourceType() {
     }
 
     public void analyzeResourceType() throws UserException {
-        String type = properties.get(TYPE);
-        if (type == null) {
+        String type = null;
+        for (Map.Entry<String, String> property : properties.entrySet()) {
+            if (property.getKey().equalsIgnoreCase(TYPE)) {
+                type = property.getValue();
+            }
+        }
+        if (Strings.isNullOrEmpty(type)) {
             throw new AnalysisException("Resource type can't be null");
         }
 
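The hunk above makes the resource "type" property lookup case-insensitive: instead of a direct properties.get(TYPE), the statement scans the property map and matches keys with equalsIgnoreCase, then rejects a missing or empty value via Strings.isNullOrEmpty. A minimal standalone sketch of that lookup, with an illustrative class name and made-up map contents (not part of the commit):

    import java.util.Map;

    public class TypeLookupSketch {
        // Value of the last entry whose key equals "type" ignoring case, or null when absent.
        static String findType(Map<String, String> properties) {
            String type = null;
            for (Map.Entry<String, String> property : properties.entrySet()) {
                if (property.getKey().equalsIgnoreCase("type")) {
                    type = property.getValue();
                }
            }
            return type;
        }

        public static void main(String[] args) {
            System.out.println(findType(Map.of("TYPE", "HDFS")));   // HDFS
            System.out.println(findType(Map.of("type", "s3")));     // s3
            System.out.println(findType(Map.of("s3.region", "x"))); // null
        }
    }

The next two hunks apply the same pattern to CREATE STORAGE VAULT.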
@@ -31,6 +31,8 @@
 import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.qe.ConnectContext;
 
+import com.google.common.base.Strings;
+
 import java.util.Map;
 
 // CREATE STORAGE VAULT vault_name
@@ -119,10 +121,17 @@ public void analyze(Analyzer analyzer) throws UserException {
         if (properties == null || properties.isEmpty()) {
             throw new AnalysisException("Storage Vault properties can't be null");
         }
-        String type = properties.get(TYPE);
-        if (type == null) {
+
+        String type = null;
+        for (Map.Entry<String, String> property : properties.entrySet()) {
+            if (property.getKey().equalsIgnoreCase(TYPE)) {
+                type = property.getValue();
+            }
+        }
+        if (Strings.isNullOrEmpty(type)) {
             throw new AnalysisException("Storage Vault type can't be null");
         }
+
         final String pathVersionString = properties.get(PATH_VERSION);
         if (pathVersionString != null) {
             this.pathVersion = Integer.parseInt(pathVersionString);
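Besides the case-insensitive key match, the vault statement now validates the value with Guava's Strings.isNullOrEmpty rather than a plain null check, so an explicitly empty "type" = "" is rejected with the same "Storage Vault type can't be null" error instead of slipping past a null test. A small illustration of the difference (the variable names are made up for the example):

    import com.google.common.base.Strings;

    public class EmptyTypeCheckSketch {
        public static void main(String[] args) {
            String missing = null;
            String empty = "";

            // A plain null check only catches the missing key ...
            System.out.println(missing == null);                 // true
            System.out.println(empty == null);                   // false -> "" would slip through

            // ... while Strings.isNullOrEmpty treats null and "" the same way.
            System.out.println(Strings.isNullOrEmpty(missing));  // true
            System.out.println(Strings.isNullOrEmpty(empty));    // true
        }
    }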
@@ -20,7 +20,9 @@
 import org.apache.doris.cloud.proto.Cloud;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.security.authentication.AuthenticationConfig;
+import org.apache.doris.datasource.property.constants.S3Properties;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
 import com.google.gson.annotations.SerializedName;
@@ -109,6 +111,10 @@ public static Cloud.HdfsVaultInfo generateHdfsParam(Map<String, String> properti
             } else if (property.getKey().equalsIgnoreCase(VAULT_NAME)) {
                 continue;
             } else {
+                Preconditions.checkArgument(!property.getKey().toLowerCase().contains(S3Properties.S3_PREFIX),
+                        "Invalid argument %s", property.getKey());
+                Preconditions.checkArgument(!property.getKey().toLowerCase().contains(S3Properties.PROVIDER),
+                        "Invalid argument %s", property.getKey());
                 if (!nonHdfsConfPropertyKeys.contains(property.getKey().toLowerCase())) {
                     Cloud.HdfsBuildConf.HdfsConfKVPair.Builder conf = Cloud.HdfsBuildConf.HdfsConfKVPair.newBuilder();
                     conf.setKey(property.getKey());
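The new Preconditions checks make generateHdfsParam fail fast when an HDFS vault definition smuggles in S3-style properties: any unrecognized key containing the S3 prefix or the provider constant is rejected with "Invalid argument <key>", which is the message the new regression test expects for "s3.region". A rough standalone sketch of the guard; the literal "s3." and "provider" values stand in for S3Properties.S3_PREFIX and S3Properties.PROVIDER, whose exact contents are not shown in this diff:

    import com.google.common.base.Preconditions;
    import java.util.Map;

    public class HdfsVaultGuardSketch {
        // Illustrative stand-ins for the S3Properties constants referenced above.
        private static final String S3_PREFIX = "s3.";
        private static final String PROVIDER = "provider";

        // Reject any property key that looks like an S3 setting.
        static void checkHdfsOnlyProperties(Map<String, String> properties) {
            for (Map.Entry<String, String> property : properties.entrySet()) {
                String key = property.getKey().toLowerCase();
                Preconditions.checkArgument(!key.contains(S3_PREFIX),
                        "Invalid argument %s", property.getKey());
                Preconditions.checkArgument(!key.contains(PROVIDER),
                        "Invalid argument %s", property.getKey());
            }
        }

        public static void main(String[] args) {
            checkHdfsOnlyProperties(Map.of("hadoop.username", "hadoop")); // passes
            try {
                checkHdfsOnlyProperties(Map.of("s3.region", "us-east-1"));
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage()); // Invalid argument s3.region
            }
        }
    }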
@@ -0,0 +1,228 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_create_vault_with_case_sensitive", "nonConcurrent") {
def suiteName = name;
if (!isCloudMode()) {
logger.info("skip ${name} case, because not cloud mode")
return
}

if (!enableStoragevault()) {
logger.info("skip ${name} case, because storage vault not enabled")
return
}

def randomStr = UUID.randomUUID().toString().replace("-", "")
def s3VaultName = "s3_" + randomStr
def hdfsVaultName = "hdfs_" + randomStr

// hdfs vault case
expectExceptionLike({
sql """
CREATE STORAGE VAULT ${hdfsVaultName}
PROPERTIES (
"type" = "aaaa",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "${hdfsVaultName}",
"hadoop.username" = "${getHmsUser()}"
);
"""
}, "Unsupported Storage Vault type")

expectExceptionLike({
sql """
CREATE STORAGE VAULT ${hdfsVaultName}
PROPERTIES (
"type" = "s3",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "${hdfsVaultName}",
"hadoop.username" = "${getHmsUser()}"
);
"""
}, "Missing [s3.endpoint] in properties")

expectExceptionLike({
sql """
CREATE STORAGE VAULT ${hdfsVaultName}
PROPERTIES (
"type" = "S3",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "${hdfsVaultName}",
"hadoop.username" = "${getHmsUser()}"
);
"""
}, "Missing [s3.endpoint] in properties")

sql """
CREATE STORAGE VAULT ${hdfsVaultName}
PROPERTIES (
"type" = "hdfs",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "${hdfsVaultName}",
"hadoop.username" = "${getHmsUser()}"
);
"""

sql """
CREATE STORAGE VAULT ${hdfsVaultName.toUpperCase()}
PROPERTIES (
"TYPE" = "HDFS",
"FS.DEFAULTFS"="${getHmsHdfsFs()}",
"PATH_PREFIX" = "${hdfsVaultName.toUpperCase()}",
"HADOOP.USERNAME" = "${getHmsUser()}"
);
"""

// s3 vault case
expectExceptionLike({
sql """
CREATE STORAGE VAULT ${s3VaultName}
PROPERTIES (
"type" = "bbbb",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "${s3VaultName}",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}",
"use_path_style" = "false"
);
"""
}, "Unsupported Storage Vault type")

expectExceptionLike({
sql """
CREATE STORAGE VAULT ${s3VaultName}
PROPERTIES (
"type" = "hdfs",
"FS.DEFAULTFS"="${getHmsHdfsFs()}",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "${s3VaultName}",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}",
"use_path_style" = "false"
);
"""
}, "Invalid argument s3.region")

expectExceptionLike({
sql """
CREATE STORAGE VAULT ${s3VaultName}
PROPERTIES (
"type" = "HDFS",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "${s3VaultName}",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}",
"use_path_style" = "false"
);
"""
}, "Invalid argument s3.region")

sql """
CREATE STORAGE VAULT ${s3VaultName}
PROPERTIES (
"type" = "s3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "${s3VaultName}",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}",
"use_path_style" = "false"
);
"""

// S3.xx properties are case sensitive
expectExceptionLike({
sql """
CREATE STORAGE VAULT ${s3VaultName.toUpperCase()}
PROPERTIES (
"TYPE" = "S3",
"S3.ENDPOINT"="${getS3Endpoint()}",
"S3.REGION" = "${getS3Region()}",
"S3.ACCESS_KEY" = "${getS3AK()}",
"S3.SECRET_KEY" = "${getS3SK()}",
"S3.ROOT.PATH" = "${s3VaultName}",
"S3.BUCKET" = "${getS3BucketName()}",
"S3.EXTERNAL_ENDPOINT" = "",
"PROVIDER" = "${getS3Provider()}",
"USE_PATH_STYLE" = "false"
);
"""
}, "Missing [s3.endpoint] in properties")

sql """
CREATE STORAGE VAULT ${s3VaultName.toUpperCase()}
PROPERTIES (
"TYPE" = "S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "${s3VaultName}",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}",
"use_path_style" = "false"
);
"""

def vaultInfos = try_sql """SHOW STORAGE VAULTS"""

boolean hdfsVaultLowerExist = false;
boolean hdfsVaultUpperExist = false;

boolean s3VaultLowerExist = false;
boolean s3VaultUpperExist = false;

for (int i = 0; i < vaultInfos.size(); i++) {
logger.info("vault info: ${vaultInfos[i]}")
if (vaultInfos[i][0].equals(hdfsVaultName)) {
hdfsVaultLowerExist = true
}

if (vaultInfos[i][0].equals(hdfsVaultName.toUpperCase())) {
hdfsVaultUpperExist = true
}

if (vaultInfos[i][0].equals(s3VaultName)) {
s3VaultLowerExist = true
}

if (vaultInfos[i][0].equals(s3VaultName.toUpperCase())) {
s3VaultUpperExist = true
}
}
assertTrue(hdfsVaultLowerExist)
assertTrue(hdfsVaultUpperExist)
assertTrue(s3VaultLowerExist)
assertTrue(s3VaultUpperExist)
}