Skip to content
This repository has been archived by the owner on Mar 27, 2022. It is now read-only.

Commit

Permalink
Add proper escaping for bigtable_env.sh variables.
Browse files Browse the repository at this point in the history
Fix handling of the bigtable-hbase.jar destination.
-------------
Created by MOE: http://code.google.com/p/moe-java
MOE_MIGRATED_REVID=92781791
  • Loading branch information
angusdavis committed May 6, 2015
1 parent 4228e91 commit 50119a5
Showing 1 changed file with 22 additions and 22 deletions.
44 changes: 22 additions & 22 deletions extensions/bigtable/install_hbase_bigtable.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,65 +16,65 @@ set -o nounset
set -o errexit

# Get the filename out of the full URI.
HBASE_TARBALL="${HBASE_TARBALL_URI##*/}"

# Get the tarball, untar it.
download_bd_resource "${HBASE_TARBALL_URI}" "/home/hadoop/${HBASE_TARBALL}"

tar -C /home/hadoop -xzvf "/home/hadoop/${HBASE_TARBALL}"
# Intentional glob: the tarball extracts into a single hbase-<version>/ dir.
mv /home/hadoop/hbase*/ "${HBASE_INSTALL_DIR}"

mkdir -p "${BIGTABLE_LIB_DIR}"

# Download the alpn jar. The Alpn jar should be a fully qualified URL.
# download_bd_resource needs a fully qualified file path and not just a
# directory name to put the file in when the file to download starts with
# http://.
ALPN_JAR_NAME="${ALPN_REMOTE_JAR##*/}"
ALPN_BOOT_JAR="${BIGTABLE_LIB_DIR}/${ALPN_JAR_NAME}"
download_bd_resource "${ALPN_REMOTE_JAR}" "${ALPN_BOOT_JAR}"

# Download the jar that contains the Bigtable API and the Bigtable HBase
# integration.
BIGTABLE_HBASE_JAR_NAME="${BIGTABLE_HBASE_JAR##*/}"
download_bd_resource "${BIGTABLE_HBASE_JAR}" "${BIGTABLE_LIB_DIR}/${BIGTABLE_HBASE_JAR_NAME}"

# Resolve the downloaded jar to an absolute path. The glob is intentional
# (the jar name carries a version suffix); only the directory part is quoted
# so the glob still expands.
BIGTABLE_CLASSPATH=$(readlink -f "${BIGTABLE_LIB_DIR}"/bigtable-hbase-*.jar)

# Set up hbase-site.xml to make sure it can access HDFS.
bdconfig merge_configurations \
    --configuration_file "${HBASE_CONF_DIR}/hbase-site.xml" \
    --source_configuration_file bigtable-hbase-site-template.xml \
    --resolve_environment_variables \
    --create_if_absent \
    --clobber

# Symlink the Hadoop hdfs-site.xml to hbase's "copy" of it.
ln -s "${HADOOP_CONF_DIR}/hdfs-site.xml" "${HBASE_CONF_DIR}/hdfs-site.xml"

# Add the hbase 'bin' path to the .bashrc so that it's easy to call 'hbase'
# during interactive ssh session.
add_to_path_at_login "${HBASE_INSTALL_DIR}/bin"

# Assign ownership of everything to the 'hadoop' user.
chown -R hadoop:hadoop /home/hadoop/ "${HBASE_INSTALL_DIR}"

# Update hadoop-env.sh with alpn boot classpath. Create an environment variable
# BIGTABLE_BOOT_OPTS that makes command line requests a bit easier.
# NOTE: ${ALPN_BOOT_JAR} and ${BIGTABLE_BOOT_OPTS} are expanded NOW (at install
# time); the escaped \${HADOOP_OPTS} etc. are expanded later, when
# hadoop-env.sh is sourced.
echo -e "" >> "${HADOOP_CONF_DIR}/hadoop-env.sh"
echo -e "HADOOP_OPTS=\"\${HADOOP_OPTS} -Xbootclasspath/p:${ALPN_BOOT_JAR}\"" >> "${HADOOP_CONF_DIR}/hadoop-env.sh"
echo -e "HADOOP_TASKTRACKER_OPTS=\"\${HADOOP_TASKTRACKER_OPTS} -Xbootclasspath/p:${ALPN_BOOT_JAR}\"" >> "${HADOOP_CONF_DIR}/hadoop-env.sh"
echo -e "BIGTABLE_BOOT_OPTS=\"${BIGTABLE_BOOT_OPTS}\"" >> "${HADOOP_CONF_DIR}/hadoop-env.sh"

# TODO: This should probably be removed at some point. This is done in order
# to add in a newer version of guava that's bundled with ${BIGTABLE_RPC_JAR}.
echo -e "HADOOP_CLASSPATH=${BIGTABLE_CLASSPATH}:\${HADOOP_CLASSPATH}" >> "${HADOOP_CONF_DIR}/hadoop-env.sh"
echo -e "HADOOP_USER_CLASSPATH_FIRST=true" >> "${HADOOP_CONF_DIR}/hadoop-env.sh"

# Update yarn-env.sh with alpn boot classpath.
echo -e "" >> "${HADOOP_CONF_DIR}/yarn-env.sh"
echo -e "YARN_OPTS=\"\${YARN_OPTS} -Dyarn.app.mapreduce.am.command-opts=\"${BIGTABLE_BOOT_OPTS}\"\"" >> "${HADOOP_CONF_DIR}/yarn-env.sh"

# Update hbase-env.sh with alpn boot classpath and add the Bigtable classpath
# to the hbase classpath.
echo -e "HBASE_OPTS=\"\${HBASE_OPTS} ${BIGTABLE_BOOT_OPTS}\"" >> "${HBASE_CONF_DIR}/hbase-env.sh"

exit

0 comments on commit 50119a5

Please sign in to comment.