diff --git a/catalogs/catalog-hadoop/build.gradle.kts b/catalogs/catalog-hadoop/build.gradle.kts index 8afcf6a23e3..ccdc0519683 100644 --- a/catalogs/catalog-hadoop/build.gradle.kts +++ b/catalogs/catalog-hadoop/build.gradle.kts @@ -99,7 +99,7 @@ tasks.test { doFirst { environment("GRAVITINO_CI_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-hive:0.1.12") - environment("GRAVITINO_CI_KERBEROS_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-kerberos-hive:0.1.0") + environment("GRAVITINO_CI_KERBEROS_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-kerberos-hive:0.1.2") } val init = project.extra.get("initIntegrationTest") as (Test) -> Unit diff --git a/catalogs/catalog-hive/build.gradle.kts b/catalogs/catalog-hive/build.gradle.kts index 9baa2abafac..65a36cd6b37 100644 --- a/catalogs/catalog-hive/build.gradle.kts +++ b/catalogs/catalog-hive/build.gradle.kts @@ -165,7 +165,7 @@ tasks.test { doFirst { environment("GRAVITINO_CI_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-hive:0.1.12") - environment("GRAVITINO_CI_KERBEROS_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-kerberos-hive:0.1.1") + environment("GRAVITINO_CI_KERBEROS_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-kerberos-hive:0.1.2") } val init = project.extra.get("initIntegrationTest") as (Test) -> Unit diff --git a/dev/docker/kerberos-hive/start.sh b/dev/docker/kerberos-hive/start.sh index e937a12309b..869d8603913 100644 --- a/dev/docker/kerberos-hive/start.sh +++ b/dev/docker/kerberos-hive/start.sh @@ -7,8 +7,10 @@ # start ssh HOSTNAME=`hostname` service ssh start -ssh-keyscan localhost > /root/.ssh/known_hosts +ssh-keyscan ${HOSTNAME} >> /root/.ssh/known_hosts +ssh-keyscan localhost >> /root/.ssh/known_hosts ssh-keyscan 0.0.0.0 >> /root/.ssh/known_hosts +ssh-keyscan 127.0.0.1 >> /root/.ssh/known_hosts # init the Kerberos database echo -e "${PASS}\n${PASS}" | kdb5_util create -s @@ -65,6 +67,33 @@ ${HADOOP_HOME}/sbin/hadoop-daemon.sh start namenode echo "Starting DataNode..." 
${HADOOP_HOME}/sbin/start-secure-dns.sh +sleep 5 + +# Check if the DataNode is running +ps -ef | grep DataNode | grep -v grep +if [[ $? -ne 0 ]]; then + echo "DataNode failed to start, please check the logs" + exit 1 +fi + +retry_times=0 +ready=1 +while [[ ${retry_times} -lt 10 ]]; do + hdfs_ready=$(hdfs dfsadmin -report | grep "Live datanodes" | awk '{print $3}') + if [[ ${hdfs_ready} == "(1):" ]]; then + echo "HDFS is ready, retry_times = ${retry_times}" + let "ready=0" + break + fi + retry_times=$((retry_times+1)); sleep 1 +done + +if [[ ${ready} -ne 0 ]]; then + echo "HDFS is not ready" + cat ${HADOOP_HOME}/logs/hadoop-root-datanode-*.log + exit 1 +fi + # start mysql and create databases/users for hive chown -R mysql:mysql /var/lib/mysql