hadoop
上传用户:quxuerui
上传日期:2018-01-08
资源大小:41811k
文件大小:10k
源码类别:

网格计算

开发平台:

Java

  1. #!/usr/bin/env bash
  2. # Licensed to the Apache Software Foundation (ASF) under one or more
  3. # contributor license agreements.  See the NOTICE file distributed with
  4. # this work for additional information regarding copyright ownership.
  5. # The ASF licenses this file to You under the Apache License, Version 2.0
  6. # (the "License"); you may not use this file except in compliance with
  7. # the License.  You may obtain a copy of the License at
  8. #
  9. #     http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. # The Hadoop command script
  17. #
  18. # Environment Variables
  19. #
#   JAVA_HOME        The java implementation to use.  Overrides the
#                    JAVA_HOME inherited from the caller's environment
#                    (typically set in conf/hadoop-env.sh).
  21. #
  22. #   HADOOP_CLASSPATH Extra Java CLASSPATH entries.
  23. #
  24. #   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB. 
  25. #                    Default is 1000.
  26. #
  27. #   HADOOP_OPTS      Extra Java runtime options.
  28. #   
  29. #   HADOOP_NAMENODE_OPTS       These options are added to HADOOP_OPTS 
  30. #   HADOOP_CLIENT_OPTS         when the respective command is run.
  31. #   HADOOP_{COMMAND}_OPTS etc  HADOOP_JT_OPTS applies to JobTracker 
  32. #                              for e.g.  HADOOP_CLIENT_OPTS applies to 
  33. #                              more than one command (fs, dfs, fsck, 
  34. #                              dfsadmin etc)  
  35. #
  36. #   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
  37. #
  38. #   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
  39. #
  40. bin=`dirname "$0"`
  41. bin=`cd "$bin"; pwd`
  42. . "$bin"/hadoop-config.sh
  43. cygwin=false
  44. case "`uname`" in
  45. CYGWIN*) cygwin=true;;
  46. esac
  47. # if no args specified, show usage
  48. if [ $# = 0 ]; then
  49.   echo "Usage: hadoop [--config confdir] COMMAND"
  50.   echo "where COMMAND is one of:"
  51.   echo "  namenode -format     format the DFS filesystem"
  52.   echo "  secondarynamenode    run the DFS secondary namenode"
  53.   echo "  namenode             run the DFS namenode"
  54.   echo "  datanode             run a DFS datanode"
  55.   echo "  dfsadmin             run a DFS admin client"
  56.   echo "  mradmin              run a Map-Reduce admin client"
  57.   echo "  fsck                 run a DFS filesystem checking utility"
  58.   echo "  fs                   run a generic filesystem user client"
  59.   echo "  balancer             run a cluster balancing utility"
  60.   echo "  jobtracker           run the MapReduce job Tracker node" 
  61.   echo "  pipes                run a Pipes job"
  62.   echo "  tasktracker          run a MapReduce task Tracker node" 
  63.   echo "  job                  manipulate MapReduce jobs"
  64.   echo "  queue                get information regarding JobQueues" 
  65.   echo "  version              print the version"
  66.   echo "  jar <jar>            run a jar file"
  67.   echo "  distcp <srcurl> <desturl> copy file or directories recursively"
  68.   echo "  archive -archiveName NAME <src>* <dest> create a hadoop archive"
  69.   echo "  daemonlog            get/set the log level for each daemon"
  70.   echo " or"
  71.   echo "  CLASSNAME            run the class named CLASSNAME"
  72.   echo "Most commands print help when invoked w/o parameters."
  73.   exit 1
  74. fi
  75. # get arguments
  76. COMMAND=$1
  77. shift
  78. if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  79.   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
  80. fi
  81. # some Java parameters
  82. if [ "$JAVA_HOME" != "" ]; then
  83.   #echo "run java in $JAVA_HOME"
  84.   JAVA_HOME=$JAVA_HOME
  85. fi
  86.   
  87. if [ "$JAVA_HOME" = "" ]; then
  88.   echo "Error: JAVA_HOME is not set."
  89.   exit 1
  90. fi
  91. JAVA=$JAVA_HOME/bin/java
  92. JAVA_HEAP_MAX=-Xmx1000m 
  93. # check envvars which might override default args
  94. if [ "$HADOOP_HEAPSIZE" != "" ]; then
  95.   #echo "run with heapsize $HADOOP_HEAPSIZE"
  96.   JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
  97.   #echo $JAVA_HEAP_MAX
  98. fi
  99. # CLASSPATH initially contains $HADOOP_CONF_DIR
  100. CLASSPATH="${HADOOP_CONF_DIR}"
  101. CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
  102. # for developers, add Hadoop classes to CLASSPATH
  103. if [ -d "$HADOOP_HOME/build/classes" ]; then
  104.   CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
  105. fi
  106. if [ -d "$HADOOP_HOME/build/webapps" ]; then
  107.   CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
  108. fi
  109. if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  110.   CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
  111. fi
  112. if [ -d "$HADOOP_HOME/build/tools" ]; then
  113.   CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
  114. fi
  115. # so that filenames w/ spaces are handled correctly in loops below
  116. IFS=
  117. # for releases, add core hadoop jar & webapps to CLASSPATH
  118. if [ -d "$HADOOP_HOME/webapps" ]; then
  119.   CLASSPATH=${CLASSPATH}:$HADOOP_HOME
  120. fi
  121. for f in $HADOOP_HOME/hadoop-*-core.jar; do
  122.   CLASSPATH=${CLASSPATH}:$f;
  123. done
  124. # add libs to CLASSPATH
  125. for f in $HADOOP_HOME/lib/*.jar; do
  126.   CLASSPATH=${CLASSPATH}:$f;
  127. done
  128. if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
  129. for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
  130.   CLASSPATH=${CLASSPATH}:$f;
  131. done
  132. fi
  133. for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
  134.   CLASSPATH=${CLASSPATH}:$f;
  135. done
  136. for f in $HADOOP_HOME/hadoop-*-tools.jar; do
  137.   TOOL_PATH=${TOOL_PATH}:$f;
  138. done
  139. for f in $HADOOP_HOME/build/hadoop-*-tools.jar; do
  140.   TOOL_PATH=${TOOL_PATH}:$f;
  141. done
  142. # add user-specified CLASSPATH last
  143. if [ "$HADOOP_CLASSPATH" != "" ]; then
  144.   CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
  145. fi
  146. # default log directory & file
  147. if [ "$HADOOP_LOG_DIR" = "" ]; then
  148.   HADOOP_LOG_DIR="$HADOOP_HOME/logs"
  149. fi
  150. if [ "$HADOOP_LOGFILE" = "" ]; then
  151.   HADOOP_LOGFILE='hadoop.log'
  152. fi
  153. # default policy file for service-level authorization
  154. if [ "$HADOOP_POLICYFILE" = "" ]; then
  155.   HADOOP_POLICYFILE="hadoop-policy.xml"
  156. fi
  157. # restore ordinary behaviour
  158. unset IFS
  159. # figure out which class to run
  160. if [ "$COMMAND" = "namenode" ] ; then
  161.   CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
  162.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
  163. elif [ "$COMMAND" = "secondarynamenode" ] ; then
  164.   CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
  165.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
  166. elif [ "$COMMAND" = "datanode" ] ; then
  167.   CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
  168.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
  169. elif [ "$COMMAND" = "fs" ] ; then
  170.   CLASS=org.apache.hadoop.fs.FsShell
  171.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  172. elif [ "$COMMAND" = "dfs" ] ; then
  173.   CLASS=org.apache.hadoop.fs.FsShell
  174.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  175. elif [ "$COMMAND" = "dfsadmin" ] ; then
  176.   CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  177.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  178. elif [ "$COMMAND" = "mradmin" ] ; then
  179.   CLASS=org.apache.hadoop.mapred.tools.MRAdmin
  180.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  181. elif [ "$COMMAND" = "fsck" ] ; then
  182.   CLASS=org.apache.hadoop.hdfs.tools.DFSck
  183.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  184. elif [ "$COMMAND" = "balancer" ] ; then
  185.   CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  186.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
  187. elif [ "$COMMAND" = "jobtracker" ] ; then
  188.   CLASS=org.apache.hadoop.mapred.JobTracker
  189.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
  190. elif [ "$COMMAND" = "tasktracker" ] ; then
  191.   CLASS=org.apache.hadoop.mapred.TaskTracker
  192.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
  193. elif [ "$COMMAND" = "job" ] ; then
  194.   CLASS=org.apache.hadoop.mapred.JobClient
  195. elif [ "$COMMAND" = "queue" ] ; then
  196.   CLASS=org.apache.hadoop.mapred.JobQueueClient
  197. elif [ "$COMMAND" = "pipes" ] ; then
  198.   CLASS=org.apache.hadoop.mapred.pipes.Submitter
  199.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  200. elif [ "$COMMAND" = "version" ] ; then
  201.   CLASS=org.apache.hadoop.util.VersionInfo
  202.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  203. elif [ "$COMMAND" = "jar" ] ; then
  204.   CLASS=org.apache.hadoop.util.RunJar
  205. elif [ "$COMMAND" = "distcp" ] ; then
  206.   CLASS=org.apache.hadoop.tools.DistCp
  207.   CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  208.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  209. elif [ "$COMMAND" = "daemonlog" ] ; then
  210.   CLASS=org.apache.hadoop.log.LogLevel
  211.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  212. elif [ "$COMMAND" = "archive" ] ; then
  213.   CLASS=org.apache.hadoop.tools.HadoopArchives
  214.   CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  215.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  216. elif [ "$COMMAND" = "sampler" ] ; then
  217.   CLASS=org.apache.hadoop.mapred.lib.InputSampler
  218.   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
  219. else
  220.   CLASS=$COMMAND
  221. fi
  222. # cygwin path translation
  223. if $cygwin; then
  224.   CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  225.   HADOOP_HOME=`cygpath -w "$HADOOP_HOME"`
  226.   HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
  227.   TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
  228. fi
  229. # setup 'java.library.path' for native-hadoop code if necessary
  230. JAVA_LIBRARY_PATH=''
  231. if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
  232.   JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
  233.   
  234.   if [ -d "$HADOOP_HOME/build/native" ]; then
  235.     JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
  236.   fi
  237.   
  238.   if [ -d "${HADOOP_HOME}/lib/native" ]; then
  239.     if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  240.       JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
  241.     else
  242.       JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
  243.     fi
  244.   fi
  245. fi
  246. # cygwin path translation
  247. if $cygwin; then
  248.   JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
  249. fi
  250. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
  251. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
  252. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
  253. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
  254. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
  255. if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  256.   HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
  257. fi  
  258. HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
  259. # run it
  260. exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"