[root@node1 conf]# pwd
/opt/app/spark-2.3.1/conf
[root@node1 conf]# mv spark-defaults.conf.template spark-defaults.conf
[root@node1 conf]# vi spark-defaults.conf
{
spark.master spark://node1:7077
spark.eventLog.enabled true
spark.eventLog.dir hdfs://node1:9000/spark-history
spark.eventLog.compress true
}
[root@node1 conf]# vi spark-env.sh
{
export SPARK_HISTORY_OPTS="-Dspark.history.ui.port=4000 -Dspark.history.retainedApplications=3 -Dspark.history.fs.logDirectory=hdfs://node1:9000/spark-history"
}
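Note that spark.history.fs.logDirectory must point to the same HDFS path as spark.eventLog.dir above, otherwise the history server has nothing to read. If this is a multi-node standalone cluster, the updated conf files also need to be copied to the other nodes. A minimal sketch, assuming workers named node2 and node3 with the same install path (adjust to your cluster):
[root@node1 conf]# scp spark-defaults.conf spark-env.sh node2:/opt/app/spark-2.3.1/conf/
[root@node1 conf]# scp spark-defaults.conf spark-env.sh node3:/opt/app/spark-2.3.1/conf/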
# The event log directory must exist in advance
[root@node1 conf]# hdfs dfs -mkdir /spark-history
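Optionally confirm the directory exists before starting the server (a plain HDFS listing, nothing Spark-specific):
[root@node1 conf]# hdfs dfs -ls /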
# Start the history server
[root@node1 conf]# start-history-server.sh
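If it starts correctly, a HistoryServer process should show up in jps, and its daemon log is written under the Spark logs directory. A sketch, assuming the default $SPARK_HOME/logs location (the exact log file name may differ on your machine):
[root@node1 conf]# jps
[root@node1 conf]# tail -f /opt/app/spark-2.3.1/logs/spark-root-org.apache.spark.deploy.history.HistoryServer-1-node1.out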
# Access the web UI (port 4000 as set by spark.history.ui.port above)
http://node1:4000/
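The UI stays empty until at least one application has written an event log to /spark-history. A quick way to generate one is to submit the bundled SparkPi example and then refresh http://node1:4000/. This is a sketch that assumes the example jar shipped with a pre-built Spark 2.3.1 sits at its default path:
[root@node1 conf]# spark-submit --class org.apache.spark.examples.SparkPi \
    --master spark://node1:7077 \
    /opt/app/spark-2.3.1/examples/jars/spark-examples_2.11-2.3.1.jar 100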