Skip to content

Commit a16bab4

Browse files
Updated headers for all scripts; added default suppression of HADOOP_HOME being deprecated
1 parent 225434d commit a16bab4

File tree

3 files changed

+27
-2
lines changed

3 files changed

+27
-2
lines changed

bin/myhadoop-bootstrap.sh

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,17 @@
11
#!/bin/bash
22
################################################################################
33
# myhadoop-bootstrap - call from within a job script to do the entire cluster
4-
# setup in a hands-off fashion
4+
# setup in a hands-off fashion.
5+
#
6+
# This script still requires manual intervention in editing the following
7+
# environment variables:
8+
# * MY_HADOOP_IPOIB_SUFFIX
9+
# * MY_HADOOP_IPOIB_PREFIX
10+
# * MY_HADOOP_LIFETIME
11+
# * SCRATCH_DIR
12+
#
13+
# It is considered experimental and is still in the process of being updated
14+
# to be as flexible as possible.
515
#
616
# Glenn K. Lockwood January 2014
717
################################################################################

bin/myhadoop-cleanup.sh

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,10 @@
11
#!/bin/bash
2+
################################################################################
3+
# myhadoop-cleanup.sh - clean up all of the directories created by running a
4+
# Hadoop cluster via myHadoop.
5+
#
6+
# Glenn K. Lockwood, San Diego Supercomputer Center February 2014
7+
################################################################################
28

39
### Make sure HADOOP_CONF_DIR is set
410
if [ "z$HADOOP_CONF_DIR" == "z" ]; then

bin/myhadoop-configure.sh

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,13 @@
11
#!/bin/bash
2+
################################################################################
3+
# myhadoop-configure.sh - establish a valid $HADOOP_CONF_DIR with all of the
4+
# configurations necessary to start a Hadoop cluster from within a HPC batch
5+
# environment. Additionally format HDFS and leave everything in a state ready
6+
# for Hadoop to start up via start-all.sh.
7+
#
8+
# Glenn K. Lockwood, San Diego Supercomputer Center
9+
# Sriram Krishnan, San Diego Supercomputer Center February 2014
10+
################################################################################
211

312
function print_usage {
413
echo "Usage: [-n NODES] [-p -d BASE_DIR] -c CONFIG_DIR -s LOCAL_SCRATCH"
@@ -137,7 +146,6 @@ cat $HADOOP_CONF_DIR/slaves
137146
### the substitutions to be applied to the conf/*.xml files below. If you update
138147
### the config_subs hash, be sure to also update myhadoop-cleanup.sh to ensure
139148
### any new directories you define get properly deleted at the end of the job!
140-
141149
cat <<EOF > $HADOOP_CONF_DIR/myhadoop.conf
142150
NODES=$NODES
143151
declare -A config_subs
@@ -166,6 +174,7 @@ cat <<EOF >> $HADOOP_CONF_DIR/hadoop-env.sh
166174
# myHadoop alterations for this job:
167175
export HADOOP_LOG_DIR=${config_subs[HADOOP_LOG_DIR]}
168176
export HADOOP_PID_DIR=${config_subs[HADOOP_PID_DIR]}
177+
export HADOOP_HOME_WARN_SUPPRESS=TRUE
169178
### Jetty leaves garbage in /tmp no matter what \$TMPDIR is; this is an extreme
170179
### way of preventing that
171180
# export _JAVA_OPTIONS="-Djava.io.tmpdir=${config_subs[HADOOP_TMP_DIR]} $_JAVA_OPTIONS"

0 commit comments

Comments
 (0)