|
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

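# Name under which this script was invoked; the basename is reused below for
# usage output and per-subcommand permission and option lookups.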
MYNAME="${BASH_SOURCE-$0}"
HADOOP_SHELL_EXECNAME="${MYNAME##*/}"

## @description build up the hadoop command's usage text.
## @audience public
## @stability stable
## @replaceable no
function hadoop_usage
{
  hadoop_add_option "buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in worker mode"
  hadoop_add_option "loglevel level" "set the log4j level for this command"
  hadoop_add_option "hosts filename" "list of hosts to use in worker mode"
  hadoop_add_option "workers" "turn on worker mode"

  hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability"
  hadoop_add_subcommand "classpath" client "prints the class path needed to get the Hadoop jar and the required libraries"
  hadoop_add_subcommand "conftest" client "validate configuration XML files"
  hadoop_add_subcommand "credential" client "interact with credential providers"
  hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon"
  hadoop_add_subcommand "dtutil" client "operations related to delegation tokens"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "fs" client "run a generic filesystem user client"
  hadoop_add_subcommand "jar <jar>" client "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command."
  hadoop_add_subcommand "jnipath" client "prints the java.library.path"
  hadoop_add_subcommand "kerbname" client "show auth_to_local principal conversion"
  hadoop_add_subcommand "key" client "manage keys via the KeyProvider"
  hadoop_add_subcommand "trace" client "view and modify Hadoop tracing settings"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}

## @description Default command handler for hadoop command
## @audience public
## @stability stable
## @replaceable no
## @param CLI arguments
function hadoopcmd_case
{
  subcmd=$1
  shift

  case ${subcmd} in
    balancer|datanode|dfs|dfsadmin|dfsgroups| \
    namenode|secondarynamenode|fsck|fetchdt|oiv| \
    portmap|nfs3)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/dfsgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"hdfs ${subcmd}\" instead."
      hadoop_error ""
      # try to locate hdfs and if present, delegate to it.
      if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then
        exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
        exec "${HADOOP_HOME}/bin/hdfs" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      else
        hadoop_error "HADOOP_HDFS_HOME not found!"
        exit 1
      fi
    ;;

    # mapred commands for backwards compatibility
    pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/mrgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"mapred ${subcmd}\" instead."
      hadoop_error ""
      # try to locate mapred and if present, delegate to it.
      if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
        exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
        exec "${HADOOP_HOME}/bin/mapred" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      else
        hadoop_error "HADOOP_MAPRED_HOME not found!"
        exit 1
      fi
    ;;
    checknative)
      HADOOP_CLASSNAME=org.apache.hadoop.util.NativeLibraryChecker
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    conftest)
      HADOOP_CLASSNAME=org.apache.hadoop.util.ConfTest
    ;;
    credential)
      HADOOP_CLASSNAME=org.apache.hadoop.security.alias.CredentialShell
    ;;
    daemonlog)
      HADOOP_CLASSNAME=org.apache.hadoop.log.LogLevel
    ;;
    dtutil)
      HADOOP_CLASSNAME=org.apache.hadoop.security.token.DtUtilShell
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"
      echo "HADOOP_COMMON_DIR='${HADOOP_COMMON_DIR}'"
      echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
      echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      exit 0
    ;;
    fs)
      HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
    ;;
    jar)
      if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
        hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
      fi
      HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
    ;;
    jnipath)
      hadoop_finalize
      echo "${JAVA_LIBRARY_PATH}"
      exit 0
    ;;
    kerbname)
      HADOOP_CLASSNAME=org.apache.hadoop.security.HadoopKerberosName
    ;;
    key)
      HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.KeyShell
    ;;
    trace)
      HADOOP_CLASSNAME=org.apache.hadoop.tracing.TraceAdmin
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    *)
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}

# This script runs the hadoop core commands.

# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi

HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
  # shellcheck source=./hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." >&2
  exit 1
fi

# now that we have support code, let's abs MYNAME so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")

if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi

HADOOP_SUBCMD=$1
shift

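# Some subcommands need to be re-executed (for example, as a different user);
# if so, re-launch this script with --reexec and exit with that invocation's
# status.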
if hadoop_need_reexec hadoop "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su hadoop "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

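# make sure the current user is actually allowed to run this subcommand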
hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

HADOOP_SUBCMD_ARGS=("$@")

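# If a hadoop_subcommand_<name> function has been defined (for example, by a
# shell profile), dispatch to it; otherwise fall back to the built-in case
# handler above.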
if declare -f hadoop_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: hadoop_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "hadoop_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  hadoopcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi

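# pick up any client-side options (e.g. HADOOP_CLIENT_OPTS)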
hadoop_add_client_opts

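# In worker mode, run this command on the configured worker hosts rather than
# only on the local machine, and exit with that status.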
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_COMMON_HOME}/bin/hadoop" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

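# add any per-subcommand options (e.g. HADOOP_<SUBCOMMAND>_OPTS) before
# handing off to the generic launcher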
hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler