本文共 1249 字,大约阅读时间需要 4 分钟。
spark-shell
# bin/spark-shell: wraps spark-submit with the REPL main class.
# NOTE: the original was collapsed onto one line, which made `export` swallow
# the spark-submit invocation as variable names; statement boundaries restored.
function main() {
  # Export SPARK_SUBMIT_OPTS (set earlier in the real script) so the JVM
  # launched by spark-submit inherits the REPL options.
  export SPARK_SUBMIT_OPTS
  "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
}
main "$@"
spark-sql
# bin/spark-sql: delegates to spark-submit with the Hive Thrift-server CLI
# driver as the main class; `exec` replaces the current shell process so the
# CLI runs under this script's PID. All CLI arguments are forwarded via "$@".
exec "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
spark-submit
# bin/spark-submit: hands off to spark-class with SparkSubmit as the entry
# class; spark-class builds the final JVM command line. `exec` replaces the
# shell process, and all user arguments are forwarded via "$@".
exec "${SPARK_HOME}"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"
spark-class
# bin/spark-class (simplified): use the Java launcher library to construct the
# final JVM command, then exec it.
# FIX: the classpath argument was garbled as -cp "="${SPARK_HOME}/jars/*" —
# the stray `"=` broke the quoting and prepended a literal `=` to the
# classpath; corrected to a single properly quoted glob.

# For tests
build_command() {
  # launcher.Main prints each word of the final command NUL-terminated,
  # then we append its own exit status, also NUL-terminated.
  "${JAVA_HOME}/bin/java" -Xmx128m -cp "${SPARK_HOME}/jars/*" org.apache.spark.launcher.Main "$@"
  printf "%d\0" $?
}

# Read the NUL-delimited words into an array (IFS= and -r preserve them verbatim).
CMD=()
while IFS= read -d '' -r ARG; do
  CMD+=("$ARG")
done < <(build_command "$@")

# The last array element is build_command's exit code; strip it off before exec.
COUNT=${#CMD[@]}
LAST=$((COUNT - 1))
LAUNCHER_EXIT_CODE=${CMD[$LAST]}
CMD=("${CMD[@]:0:$LAST}")
exec "${CMD[@]}"
Scala 类 org.apache.spark.deploy.SparkSubmit 的 main 函数
/**
 * Entry point of spark-submit: parses the command-line arguments and
 * dispatches to the action the user requested.
 *
 * @param args raw command-line arguments passed to spark-submit
 */
def main(args: Array[String]): Unit = {
  val parsedArgs = new SparkSubmitArguments(args)
  // With --verbose, echo the fully parsed arguments before acting on them.
  if (parsedArgs.verbose) {
    printStream.println(parsedArgs)
  }
  // Route to the handler for the resolved action (submit / kill / status).
  parsedArgs.action match {
    case SparkSubmitAction.SUBMIT         => submit(parsedArgs)
    case SparkSubmitAction.KILL           => kill(parsedArgs)
    case SparkSubmitAction.REQUEST_STATUS => requestStatus(parsedArgs)
  }
}
转载地址:http://zvdef.baihongyu.com/