Skip to content

Commit

Permalink
Test Hive and Iceberg with Apache Hive 3.1.2
Browse files Browse the repository at this point in the history
Test Hive and Iceberg connectors with vanilla Apache Hive 3.1.2
(i.e. without HDP's modifications).

Since HiveServer2 startup is currently unreliable in the hive3.1-hive testing image,
this includes only tests that do not require HiveServer2.
  • Loading branch information
homar authored and findepi committed Sep 23, 2021
1 parent e38dbf0 commit fd878a6
Show file tree
Hide file tree
Showing 15 changed files with 114 additions and 27 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ jobs:
- config-empty
- config-hdp3
# TODO: config-cdh5
# TODO: config-apache-hive3
timeout-minutes: 60
steps:
- uses: actions/checkout@v2
Expand Down Expand Up @@ -385,6 +386,7 @@ jobs:
- default
- hdp3
- cdh5
# TODO: config-apache-hive3
suite:
- suite-1
- suite-2
Expand Down Expand Up @@ -436,6 +438,9 @@ jobs:
# this suite is not meant to be run with different configs
- config: default
suite: suite-compatibility
# this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3.
- config: apache-hive3
suite: suite-hms-only
# PT Launcher's timeout defaults to 2h, account for preparation steps (compilation) and add some margin
timeout-minutes: 140
steps:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Use the vanilla Apache Hive 3.1 image (no HDP modifications) as the Hadoop base.
export HADOOP_BASE_IMAGE="ghcr.io/trinodb/testing/hive3.1-hive"
# Comma-separated tempto configs: Hive 3 specifics layered with the HMS-only overrides.
export TEMPTO_ENVIRONMENT_CONFIG_FILE="/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hive3.yaml,/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hms-only.yaml"
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

public final class EnvironmentDefaults
{
public static final String DOCKER_IMAGES_VERSION = "41";
public static final String DOCKER_IMAGES_VERSION = "43";
public static final String HADOOP_BASE_IMAGE = "ghcr.io/trinodb/testing/hdp2.6-hive";
public static final String HADOOP_IMAGES_VERSION = DOCKER_IMAGES_VERSION;
public static final String TEMPTO_ENVIRONMENT_CONFIG = "/dev/null";
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.env.configs;

/**
 * Product-test launcher configuration targeting vanilla Apache Hive 3.1
 * (the {@code hive3.1-hive} testing image), i.e. without HDP's modifications.
 */
public class ConfigApacheHive3
        extends ConfigDefault
{
    private static final String HADOOP_BASE_IMAGE = "ghcr.io/trinodb/testing/hive3.1-hive";
    // Comma-separated list of tempto configuration files: Hive 3 specifics layered
    // with the HMS-only overrides.
    private static final String TEMPTO_ENVIRONMENT_CONFIG_FILES =
            "/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hive3.yaml," +
                    "/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hms-only.yaml";

    @Override
    public String getHadoopBaseImage()
    {
        return HADOOP_BASE_IMAGE;
    }

    @Override
    public String getTemptoEnvironmentConfigFile()
    {
        return TEMPTO_ENVIRONMENT_CONFIG_FILES;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.suite.suites;

import com.google.common.collect.ImmutableList;
import io.trino.tests.product.launcher.env.EnvironmentConfig;
import io.trino.tests.product.launcher.env.environment.EnvSinglenode;
import io.trino.tests.product.launcher.suite.Suite;
import io.trino.tests.product.launcher.suite.SuiteTestRun;

import java.util.List;

import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

/**
 * Suite containing only tests in the {@code hms_only} group, i.e. tests that talk
 * to the Hive Metastore but do not require a working HiveServer2.
 */
public class SuiteHmsOnly
        extends Suite
{
    @Override
    public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
    {
        // Single run: the hms_only group on the plain single-node environment.
        SuiteTestRun hmsOnlyRun = testOnEnvironment(EnvSinglenode.class)
                .withGroups("hms_only")
                .build();
        return ImmutableList.of(hmsOnlyRun);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Tempto overrides for HMS-only test runs (tests that must not use HiveServer2).
databases:
  hive:
    # Make hive server configuration invalid to make sure the hms_only tests do not accidentally depend on HS2.
    # Port 12345 is deliberately not a port HiveServer2 listens on.
    jdbc_url: jdbc:hive2://${databases.hive.host}:12345
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ public final class TestGroups
public static final String JSON_FUNCTIONS = "json_functions";
public static final String STORAGE_FORMATS = "storage_formats";
public static final String STORAGE_FORMATS_DETAILED = "storage_formats_detailed";
public static final String HMS_ONLY = "hms_only";
public static final String PROFILE_SPECIFIC_TESTS = "profile_specific_tests";
public static final String HDFS_IMPERSONATION = "hdfs_impersonation";
public static final String HDFS_NO_IMPERSONATION = "hdfs_no_impersonation";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tempto.query.QueryExecutor.query;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.hive.HiveProductTest.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
import static io.trino.tests.product.hive.HiveProductTest.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
Expand All @@ -34,13 +35,13 @@
public class TestCsv
extends ProductTest
{
@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
public void testInsertIntoCsvTable()
{
testInsertIntoCsvTable("storage_formats_test_insert_into_csv", "");
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
public void testInsertIntoCsvTableWithCustomProperties()
{
testInsertIntoCsvTable("storage_formats_test_insert_into_csv_with_custom_properties", ", csv_escape = 'e', csv_separator='s', csv_quote='q'");
Expand All @@ -64,14 +65,14 @@ private void testInsertIntoCsvTable(String tableName, String additionalTableProp
query("DROP TABLE " + tableName);
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testCreateCsvTableAs()
{
testCreateCsvTableAs("");
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testCreateCsvTableAsWithCustomProperties()
{
Expand All @@ -96,14 +97,14 @@ private void testCreateCsvTableAs(String additionalParameters)
query("DROP TABLE " + tableName);
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testInsertIntoPartitionedCsvTable()
{
testInsertIntoPartitionedCsvTable("test_partitioned_csv_table", "");
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testInsertIntoPartitionedCsvTableWithCustomProperties()
{
Expand All @@ -130,14 +131,14 @@ private void testInsertIntoPartitionedCsvTable(String tableName, String addition
query("DROP TABLE " + tableName);
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testCreatePartitionedCsvTableAs()
{
testCreatePartitionedCsvTableAs("storage_formats_test_create_table_as_select_partitioned_csv", "");
}

@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testCreatePartitionedCsvTableAsWithCustomParamters()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.util.Objects.requireNonNull;
Expand All @@ -54,7 +55,7 @@ public void tearDown()
}

// Note: this test is run on various Hive versions. Hive before 3 did not have `sys` schema, but it does not hurt to run the test there too.
@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
public void testSysSchemaFilteredOut()
{
// SHOW SCHEMAS
Expand Down Expand Up @@ -125,7 +126,7 @@ public void testSysSchemaFilteredOut()
}

// Note: this test is run on various Hive versions. Hive before 3 did not have `information_schema` schema, but it does not hurt to run the test there too.
@Test(groups = STORAGE_FORMATS)
@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
public void testHiveInformationSchemaFilteredOut()
{
List<String> allInformationSchemaTables = ImmutableList.<String>builder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
import static io.trino.tempto.query.QueryExecutor.defaultQueryExecutor;
import static io.trino.tempto.query.QueryExecutor.param;
import static io.trino.tempto.query.QueryExecutor.query;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS_DETAILED;
import static io.trino.tests.product.hive.HiveProductTest.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
Expand Down Expand Up @@ -290,7 +291,7 @@ public void verifyDataProviderCompleteness()
.isEqualTo(allFormatsToTest);
}

@Test(dataProvider = "storageFormatsWithConfiguration", groups = STORAGE_FORMATS)
@Test(dataProvider = "storageFormatsWithConfiguration", groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testInsertIntoTable(StorageFormat storageFormat)
{
Expand Down Expand Up @@ -335,7 +336,7 @@ public void testInsertIntoTable(StorageFormat storageFormat)
query(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "storageFormatsWithConfiguration", groups = STORAGE_FORMATS)
@Test(dataProvider = "storageFormatsWithConfiguration", groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testCreateTableAs(StorageFormat storageFormat)
{
Expand Down Expand Up @@ -363,7 +364,7 @@ public void testCreateTableAs(StorageFormat storageFormat)
query(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "storageFormatsWithConfiguration", groups = STORAGE_FORMATS)
@Test(dataProvider = "storageFormatsWithConfiguration", groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testInsertIntoPartitionedTable(StorageFormat storageFormat)
{
Expand Down Expand Up @@ -408,7 +409,7 @@ public void testInsertIntoPartitionedTable(StorageFormat storageFormat)
query(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "storageFormatsWithNullFormat", groups = STORAGE_FORMATS_DETAILED)
@Test(dataProvider = "storageFormatsWithNullFormat", groups = {STORAGE_FORMATS_DETAILED, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testInsertAndSelectWithNullFormat(StorageFormat storageFormat)
{
Expand Down Expand Up @@ -437,7 +438,7 @@ public void testInsertAndSelectWithNullFormat(StorageFormat storageFormat)

assertThat(query(format("SELECT * FROM %s", tableName))).containsOnly(storedValues);

onHive().executeQuery(format("DROP TABLE %s", tableName));
query(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "storageFormatsWithNullFormat", groups = STORAGE_FORMATS_DETAILED)
Expand All @@ -464,7 +465,7 @@ public void testSelectWithNullFormat(StorageFormat storageFormat)
onHive().executeQuery(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "storageFormatsWithConfiguration", groups = STORAGE_FORMATS)
@Test(dataProvider = "storageFormatsWithConfiguration", groups = {STORAGE_FORMATS, HMS_ONLY})
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
public void testCreatePartitionedTableAs(StorageFormat storageFormat)
{
Expand Down Expand Up @@ -650,7 +651,7 @@ public void testTimestampCreatedFromHive(StorageFormat storageFormat)
onTrino().executeQuery("DROP TABLE " + tableName);
}

@Test(dataProvider = "storageFormatsWithNanosecondPrecision", groups = STORAGE_FORMATS_DETAILED)
@Test(dataProvider = "storageFormatsWithNanosecondPrecision", groups = {STORAGE_FORMATS_DETAILED, HMS_ONLY})
public void testTimestampCreatedFromTrino(StorageFormat storageFormat)
{
String tableName = createSimpleTimestampTable("timestamps_from_trino", storageFormat);
Expand Down Expand Up @@ -697,7 +698,7 @@ public void testStructTimestampsFromHive(StorageFormat format)
onTrino().executeQuery(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "storageFormatsWithNanosecondPrecision", groups = STORAGE_FORMATS_DETAILED)
@Test(dataProvider = "storageFormatsWithNanosecondPrecision", groups = {STORAGE_FORMATS_DETAILED, HMS_ONLY})
public void testStructTimestampsFromTrino(StorageFormat format)
{
String tableName = createStructTimestampTable("trino_struct_timestamp", format);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.ICEBERG;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.hive.util.TemporaryHiveTable.randomTableSuffix;
Expand All @@ -41,7 +42,7 @@ public void cleanUp()
onTrino().executeQuery("DROP SCHEMA iceberg.iceberg");
}

@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testCreateTable()
{
String tableName = "iceberg.iceberg.test_create_table_" + randomTableSuffix();
Expand All @@ -62,7 +63,7 @@ public void testCreateTable()
}
}

@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testCreateTableAsSelect()
{
String tableName = "iceberg.iceberg.test_create_table_as_select_" + randomTableSuffix();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import static com.google.common.collect.Iterators.getOnlyElement;
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.ICEBERG;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
Expand Down Expand Up @@ -75,7 +76,7 @@ public void cleanUp()
onTrino().executeQuery("DROP TABLE IF EXISTS iceberg.default.iceberg_table1");
}

@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testTableListing()
{
assertThat(onTrino().executeQuery("SHOW TABLES FROM iceberg.default"))
Expand All @@ -90,7 +91,7 @@ public void testTableListing()
.build());
}

@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testColumnListing()
{
assertThat(onTrino().executeQuery(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.ICEBERG;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.hive.util.TemporaryHiveTable.randomTableSuffix;
Expand All @@ -26,7 +27,7 @@
public class TestIcebergHiveTablesCompatibility
extends ProductTest
{
@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testIcebergSelectFromHiveTable()
{
String tableName = "test_iceberg_select_from_hive_" + randomTableSuffix();
Expand All @@ -41,7 +42,7 @@ public void testIcebergSelectFromHiveTable()
onTrino().executeQuery("DROP TABLE hive.default." + tableName);
}

@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testHiveSelectFromIcebergTable()
{
String tableName = "test_hive_select_from_iceberg_" + randomTableSuffix();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.ICEBERG;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
Expand All @@ -31,7 +32,7 @@
public class TestIcebergHiveViewsCompatibility
extends ProductTest
{
@Test(groups = {ICEBERG, STORAGE_FORMATS})
@Test(groups = {ICEBERG, STORAGE_FORMATS, HMS_ONLY})
public void testIcebergHiveViewsCompatibility()
{
try {
Expand Down
Loading

0 comments on commit fd878a6

Please sign in to comment.