1. Prerequisites
A ZooKeeper cluster is up and running.
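Before moving on, it is worth confirming that every ZooKeeper node reports a healthy state. A minimal sanity check, assuming zkServer.sh is on the PATH of each ZooKeeper host; one node should report leader and the others follower:

# run on each of res-spark-0001, res-spark-0002 and res-spark-0003
zkServer.sh status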
2. Deployment Steps
- Download the Spark distribution tarball
wget http://mirrors.shu.edu.cn/apache/spark/spark-2.4.0/spark-2.4.0-bin-hadoop2.7.tgz
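Mirrors occasionally serve truncated archives, so it can be worth verifying the download before unpacking. A hedged sketch, assuming the official checksum file is still available from archive.apache.org; compare the two hashes by eye:

# fetch the official SHA-512 checksum and compute the local one
wget https://archive.apache.org/dist/spark/spark-2.4.0/spark-2.4.0-bin-hadoop2.7.tgz.sha512
sha512sum spark-2.4.0-bin-hadoop2.7.tgz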
- Extract and rename
tar -zxvf spark-2.4.0-bin-hadoop2.7.tgz -C /opt
mv /opt/spark-2.4.0-bin-hadoop2.7 /opt/spark-2.4.0
- Configure environment variables
Append the following to /etc/profile:
export JAVA_HOME=/usr/lib/jdk1.8.0_172
export CLASSPATH=${JAVA_HOME}/jre/lib:${JAVA_HOME}/lib
export HADOOP_HOME=/opt/hadoop-2.7.6
export SPARK_HOME=/opt/spark-2.4.0
export PATH=${JAVA_HOME}/bin:$HADOOP_HOME/bin:$SPARK_HOME/bin:$PATH
Set the machine hostname:
hostnamectl set-hostname res-spark-0001
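Every node must also be able to resolve the other nodes' hostnames, otherwise the workers cannot register with the masters. A sketch of the required /etc/hosts entries; the IP addresses below are placeholders to be replaced with the real ones:

# /etc/hosts on every node (example IPs only)
192.168.1.101 res-spark-0001
192.168.1.102 res-spark-0002
192.168.1.103 res-spark-0003
192.168.1.104 res-spark-0004
192.168.1.105 res-spark-0005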
Run the following command to make the environment variables take effect:
source /etc/profile
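A quick check that the variables took effect in the current shell:

# should print /opt/spark-2.4.0 and the JDK 1.8 version respectively
echo $SPARK_HOME
java -version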
- Edit the configuration files
cd /opt/spark-2.4.0/conf
cp log4j.properties.template log4j.properties
cp slaves.template slaves
cp spark-env.sh.template spark-env.sh
cp spark-defaults.conf.template spark-defaults.conf
4.1 slaves
res-spark-0003
res-spark-0004
res-spark-0005
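Note that start-all.sh logs in to every host listed in slaves over SSH, so the node that runs it needs passwordless login to all workers. A minimal sketch, assuming the whole cluster runs as the same user:

# on res-spark-0001 (repeat on res-spark-0002, the standby master)
ssh-keygen -t rsa
ssh-copy-id res-spark-0003
ssh-copy-id res-spark-0004
ssh-copy-id res-spark-0005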
4.2 spark-defaults.conf
spark.deploy.recoveryMode ZOOKEEPER
spark.deploy.zookeeper.url res-spark-0001:2181,res-spark-0002:2181,res-spark-0003:2181
spark.master spark://res-spark-0001:7077
spark.eventLog.enabled true
spark.eventLog.dir hdfs://cluster1/spark/eventLog
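spark.eventLog.dir points at HDFS, and Spark refuses to start an application if the event log directory does not exist, so create it up front. Assuming the HDFS nameservice cluster1 from the configuration above:

# run once from any node with a working HDFS client
hdfs dfs -mkdir -p hdfs://cluster1/spark/eventLog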
4.3 spark-env.sh
export JAVA_HOME=/usr/lib/jdk1.8.0_172
export HADOOP_HOME=/opt/hadoop-2.7.6
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
export SPARK_HOME=/opt/spark-2.4.0
export SPARK_WORKER_CORES=6
export SPARK_WORKER_MEMORY=24g
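SPARK_WORKER_CORES and SPARK_WORKER_MEMORY should not exceed what the worker hardware can actually spare; the values above assume at least 6 free cores and 24 GB of free memory per worker. A quick way to check on each worker node:

# core count and memory in GB, for sizing the two settings above
nproc
free -g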
4.4 log4j.properties
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Set everything to be logged to the console
log4j.rootCategory=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
# Set the default spark-shell log level to WARN. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
# the user can have different defaults for the shell and regular Spark apps.
log4j.logger.org.apache.spark.repl.Main=WARN
# Settings to quiet third party logs that are too verbose
log4j.logger.org.spark_project.jetty=WARN
log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
log4j.logger.org.apache.parquet=ERROR
log4j.logger.parquet=ERROR
# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
- Distribute the Spark program and configuration files to the other nodes
scp -r /opt/spark-2.4.0 res-spark-0002:/opt
scp -r /opt/spark-2.4.0 res-spark-0003:/opt
scp -r /opt/spark-2.4.0 res-spark-0004:/opt
scp -r /opt/spark-2.4.0 res-spark-0005:/opt
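The same distribution step can be written as a loop, which scales better if workers are added later; a sketch assuming the same host naming scheme. Remember that the /etc/profile changes from step 3 also have to be applied on every node:

# copy the Spark directory to every other node
for h in res-spark-0002 res-spark-0003 res-spark-0004 res-spark-0005; do
  scp -r /opt/spark-2.4.0 ${h}:/opt
done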
- Edit the configuration on the res-spark-0002 node
6.1 spark-defaults.conf
spark.master spark://res-spark-0002:7077
- Start the cluster
On the res-spark-0001 node:
cd /opt/spark-2.4.0/sbin
./start-all.sh
On the res-spark-0002 node:
cd /opt/spark-2.4.0/sbin
./start-master.sh
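At this point all daemons should be running. A simple check, assuming the JDK's jps tool is available on each node: res-spark-0001 and res-spark-0002 should each show a Master process, and the three worker nodes should each show a Worker process.

# run on every node
jps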
- Test failover
On the res-spark-0001 node, run ./stop-master.sh
After a short delay, the standby master on res-spark-0002 is elected through ZooKeeper: its web UI status changes from STANDBY to ALIVE, and the workers re-register with it.
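As an end-to-end check, a client can be pointed at both masters at once; Spark's standalone HA master URL accepts a comma-separated list, so the session survives the loss of either master:

# the shell connects to whichever master is currently ALIVE
spark-shell --master spark://res-spark-0001:7077,res-spark-0002:7077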