|
1 | 1 | #!/bin/bash
|
| 2 | +################################################################################ |
| 3 | +# myhadoop-configure.sh - establish a valid $HADOOP_CONF_DIR with all of the |
| 4 | +# configurations necessary to start a Hadoop cluster from within a HPC batch |
| 5 | +# environment. Additionally format HDFS and leave everything in a state ready |
| 6 | +# for Hadoop to start up via start-all.sh. |
| 7 | +# |
| 8 | +# Glenn K. Lockwood, San Diego Supercomputer Center |
| 9 | +# Sriram Krishnan, San Diego Supercomputer Center February 2014 |
| 10 | +################################################################################ |
2 | 11 |
|
3 | 12 | function print_usage {
|
4 | 13 | echo "Usage: [-n NODES] [-p -d BASE_DIR] -c CONFIG_DIR -s LOCAL_SCRATCH"
|
@@ -137,7 +146,6 @@ cat $HADOOP_CONF_DIR/slaves
|
137 | 146 | ### the substitutions to be applied to the conf/*.xml files below. If you update
|
138 | 147 | ### the config_subs hash, be sure to also update myhadoop-cleanup.sh to ensure
|
139 | 148 | ### any new directories you define get properly deleted at the end of the job!
|
140 |
| - |
141 | 149 | cat <<EOF > $HADOOP_CONF_DIR/myhadoop.conf
|
142 | 150 | NODES=$NODES
|
143 | 151 | declare -A config_subs
|
@@ -166,6 +174,7 @@ cat <<EOF >> $HADOOP_CONF_DIR/hadoop-env.sh
|
166 | 174 | # myHadoop alterations for this job:
|
167 | 175 | export HADOOP_LOG_DIR=${config_subs[HADOOP_LOG_DIR]}
|
168 | 176 | export HADOOP_PID_DIR=${config_subs[HADOOP_PID_DIR]}
|
| 177 | +export HADOOP_HOME_WARN_SUPPRESS=TRUE |
169 | 178 | ### Jetty leaves garbage in /tmp no matter what \$TMPDIR is; this is an extreme
|
170 | 179 | ### way of preventing that
|
171 | 180 | # export _JAVA_OPTIONS="-Djava.io.tmpdir=${config_subs[HADOOP_TMP_DIR]} $_JAVA_OPTIONS"
|
|
0 commit comments