|
| 1 | +#!/usr/bin/env bash |
| 2 | + |
| 3 | +# Licensed to the Apache Software Foundation (ASF) under one or more |
| 4 | +# contributor license agreements. See the NOTICE file distributed with |
| 5 | +# this work for additional information regarding copyright ownership. |
| 6 | +# The ASF licenses this file to You under the Apache License, Version 2.0 |
| 7 | +# (the "License"); you may not use this file except in compliance with |
| 8 | +# the License. You may obtain a copy of the License at |
| 9 | +# |
| 10 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 11 | +# |
| 12 | +# Unless required by applicable law or agreed to in writing, software |
| 13 | +# distributed under the License is distributed on an "AS IS" BASIS, |
| 14 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | +# See the License for the specific language governing permissions and |
| 16 | +# limitations under the License. |
| 17 | + |
| 18 | +# This script runs the hadoop core commands. |
| 19 | + |
# Resolve the absolute directory containing this script.  $0 may be a
# bare name found via PATH (resolve it with command -v), a relative
# path, or an absolute path; normalise with cd/pwd.  Everything is
# quoted so paths containing spaces survive.
bin="$(command -v "$0" || echo "$0")"
bin="$(dirname "$bin")"
bin="$(cd "$bin" && pwd)"
| 23 | + |
# libexec normally sits next to the bin directory, but an explicit
# HADOOP_HOME takes precedence.
DEFAULT_LIBEXEC_DIR="$bin"/../libexec

if [ -n "$HADOOP_HOME" ]; then
  DEFAULT_LIBEXEC_DIR="$HADOOP_HOME"/libexec
fi

# The caller may override the libexec location entirely via
# HADOOP_LIBEXEC_DIR.  The sourced hadoop-config.sh sets up CLASSPATH,
# JAVA, JAVA_HEAP_MAX, HADOOP_OPTS, TOOL_PATH, etc. used below.
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. "$HADOOP_LIBEXEC_DIR"/hadoop-config.sh
| 32 | + |
# Print the top-level usage/help text for the hadoop command.
# Fixes two usage-text defects: the "Hadoop jar and the required
# libraries" continuation now follows its "classpath" entry (it was
# misplaced after "credential"), and the "key" command handled by the
# dispatcher below is now listed.
function print_usage(){
  echo "Usage: hadoop [--config confdir] COMMAND"
  echo "       where COMMAND is one of:"
  echo "  fs                   run a generic filesystem user client"
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "  checknative [-a|-h]  check native hadoop and compression libraries availability"
  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
  echo "  classpath            prints the class path needed to get the"
  echo "                       Hadoop jar and the required libraries"
  echo "  credential           interact with credential providers"
  echo "  key                  manage keys via the KeyProvider"
  echo "  daemonlog            get/set the log level for each daemon"
  echo "  trace                view and modify Hadoop tracing settings"
  echo " or"
  echo "  CLASSNAME            run the class named CLASSNAME"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
}
| 52 | + |
# No arguments: show help and stop.
if [ $# = 0 ]; then
  print_usage
  exit
fi

COMMAND=$1
case "$COMMAND" in
  # usage flags
  --help|-help|-h)
    print_usage
    exit
    ;;

  #hdfs commands — deprecated here; delegate to the hdfs script
  namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt|oiv|dfsgroups|portmap|nfs3)
    echo "DEPRECATED: Use of this script to execute hdfs command is deprecated." 1>&2
    echo "Instead use the hdfs command for it." 1>&2
    echo "" 1>&2
    #try to locate hdfs and if present, delegate to it.
    shift
    # "dfsgroups" is forwarded to hdfs under its new name "groups".
    if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
      exec "${HADOOP_HDFS_HOME}"/bin/hdfs "${COMMAND/dfsgroups/groups}" "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
      exec "${HADOOP_PREFIX}"/bin/hdfs "${COMMAND/dfsgroups/groups}" "$@"
    else
      # Diagnostic belongs on stderr, like the deprecation notices above.
      echo "HADOOP_HDFS_HOME not found!" 1>&2
      exit 1
    fi
    ;;

  #mapred commands for backwards compatibility — delegate to the mapred script
  pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
    echo "DEPRECATED: Use of this script to execute mapred command is deprecated." 1>&2
    echo "Instead use the mapred command for it." 1>&2
    echo "" 1>&2
    #try to locate mapred and if present, delegate to it.
    shift
    # "mrgroups" is forwarded to mapred under its new name "groups".
    if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
      exec "${HADOOP_MAPRED_HOME}"/bin/mapred "${COMMAND/mrgroups/groups}" "$@"
    elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
      exec "${HADOOP_PREFIX}"/bin/mapred "${COMMAND/mrgroups/groups}" "$@"
    else
      echo "HADOOP_MAPRED_HOME not found!" 1>&2
      exit 1
    fi
    ;;

  #core commands
  *)
    # Map the command name to its main class; anything unrecognised is
    # treated as a literal class name to run.
    if [ "$COMMAND" = "fs" ] ; then
      CLASS=org.apache.hadoop.fs.FsShell
    elif [ "$COMMAND" = "version" ] ; then
      CLASS=org.apache.hadoop.util.VersionInfo
    elif [ "$COMMAND" = "jar" ] ; then
      CLASS=org.apache.hadoop.util.RunJar
    elif [ "$COMMAND" = "key" ] ; then
      CLASS=org.apache.hadoop.crypto.key.KeyShell
    elif [ "$COMMAND" = "checknative" ] ; then
      CLASS=org.apache.hadoop.util.NativeLibraryChecker
    elif [ "$COMMAND" = "distcp" ] ; then
      CLASS=org.apache.hadoop.tools.DistCp
      # distcp lives in the tools jars, which are not on the default classpath.
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "daemonlog" ] ; then
      CLASS=org.apache.hadoop.log.LogLevel
    elif [ "$COMMAND" = "archive" ] ; then
      CLASS=org.apache.hadoop.tools.HadoopArchives
      CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    elif [ "$COMMAND" = "credential" ] ; then
      CLASS=org.apache.hadoop.security.alias.CredentialShell
    elif [ "$COMMAND" = "trace" ] ; then
      CLASS=org.apache.hadoop.tracing.TraceAdmin
    elif [ "$COMMAND" = "classpath" ] ; then
      if [ "$#" -eq 1 ]; then
        # No need to bother starting up a JVM for this simple case.
        # Quoted so wildcard classpath entries (e.g. lib/*) are printed
        # literally instead of being glob-expanded by the shell.
        echo "$CLASSPATH"
        exit
      else
        CLASS=org.apache.hadoop.util.Classpath
      fi
    elif [[ "$COMMAND" = -* ]] ; then
      # class and package names cannot begin with a -
      echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'" 1>&2
      exit 1
    else
      CLASS=$COMMAND
    fi
    shift

    # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"

    #make sure security appender is turned off
    HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"

    export CLASSPATH=$CLASSPATH
    # JAVA_HEAP_MAX and HADOOP_OPTS are intentionally left unquoted so
    # they word-split into individual JVM arguments.
    exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
    ;;

esac
0 commit comments