diff --git a/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/acceptance/DataprocAcceptanceTestBase.java b/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/acceptance/DataprocAcceptanceTestBase.java
index b5644133f..c3f90f145 100644
--- a/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/acceptance/DataprocAcceptanceTestBase.java
+++ b/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/acceptance/DataprocAcceptanceTestBase.java
@@ -45,7 +45,7 @@ import org.junit.Test;
 
 public class DataprocAcceptanceTestBase {
 
-
+  private static final long TIMEOUT_IN_SECONDS = 180;
   protected static final ClusterProperty DISABLE_CONSCRYPT =
       ClusterProperty.of("dataproc:dataproc.conscrypt.provider.enable", "false", "nc");
   protected static final ImmutableList DISABLE_CONSCRYPT_LIST =
@@ -184,8 +184,7 @@ public void testRead() throws Exception {
             testName,
             "read_shakespeare.py",
             null,
-            Arrays.asList(context.getResultsDirUri(testName)),
-            120);
+            Arrays.asList(context.getResultsDirUri(testName)));
     assertThat(result.getStatus().getState()).isEqualTo(JobStatus.State.DONE);
     String output = AcceptanceTestUtils.getCsv(context.getResultsDirUri(testName));
     assertThat(output.trim()).isEqualTo("spark,10");
@@ -213,8 +212,7 @@ public void writeStream() throws Exception {
                 context.testBaseGcsDir + "/" + testName + "/json/",
                 context.bqDataset,
                 context.bqStreamTable,
-                AcceptanceTestUtils.BUCKET),
-            120);
+                AcceptanceTestUtils.BUCKET));
     assertThat(result.getStatus().getState()).isEqualTo(JobStatus.State.DONE);
 
     int numOfRows = getNumOfRowsOfBqTable(context.bqDataset, context.bqStreamTable);
@@ -245,8 +243,7 @@ public void testBigNumeric() throws Exception {
             testName,
             "big_numeric.py",
             zipFileUri,
-            Arrays.asList(tableName, context.getResultsDirUri(testName)),
-            120);
+            Arrays.asList(tableName, context.getResultsDirUri(testName)));
 
     assertThat(result.getStatus().getState()).isEqualTo(JobStatus.State.DONE);
     String output = AcceptanceTestUtils.getCsv(context.getResultsDirUri(testName));
@@ -254,8 +251,7 @@ public void testBigNumeric() throws Exception {
   }
 
   private Job createAndRunPythonJob(
-      String testName, String pythonFile, String pythonZipUri, List args, long duration)
-      throws Exception {
+      String testName, String pythonFile, String pythonZipUri, List args) throws Exception {
     AcceptanceTestUtils.uploadToGcs(
         getClass().getResourceAsStream("/acceptance/" + pythonFile),
         context.getScriptUri(testName),
@@ -267,7 +263,7 @@ private Job createAndRunPythonJob(
             .setPysparkJob(createPySparkJobBuilder(testName, pythonZipUri, args))
             .build();
 
-    return runAndWait(job, Duration.ofSeconds(duration));
+    return runAndWait(job, Duration.ofSeconds(TIMEOUT_IN_SECONDS));
   }
 
   private PySparkJob.Builder createPySparkJobBuilder(