Requirements
- To meet the product data team's computing needs, build a Spark cluster: one master and two workers, spread across two machines.
Deployment environment
- CentOS hosts running Docker, with the cluster managed via docker-compose
Server resources
- Two servers: 10.0.1.2 (master + worker-1) and 10.0.1.4 (worker-2)
docker-compose installation
curl -L https://github.com/docker/compose/releases/download/1.24.1/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
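The binary downloaded by curl is not executable yet; it still needs the execute bit, after which the install can be verified (a minimal follow-up matching the official install steps for this release):
chmod +x /usr/local/bin/docker-compose
docker-compose --version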
- Master node installation (10.0.1.2)
[root@VM-1-2-centos spark]# cat /export/vm/spark/docker-compose.yaml
version: '3'
services:
  spark:
    image: docker.io/bitnami/spark:3
    hostname: master
    user: root
    environment:
      - SPARK_MODE=master
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    volumes:
      - /export/spark:/export/spark
    ports:
      - '8083:8080'
      - '4040:4040'
      - '7077:7077'
  spark-worker-1:
    image: docker.io/bitnami/spark:3
    hostname: worker1
    user: root
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://master:7077
      - SPARK_WORKER_MEMORY=16G
      - SPARK_WORKER_CORES=8
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    volumes:
      - /export/spark:/export/spark
    ports:
      - '8081:8081'
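With the file above in place on 10.0.1.2, the master and its local worker can be brought up with docker-compose; the directory below follows the path shown in the prompt:
cd /export/vm/spark
docker-compose up -d
docker-compose ps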
- Worker node installation (10.0.1.4)
[root@VM-1-4-centos spark]# cat /export/vm/spark/docker-compose.yaml
version: '3'
services:
  spark-worker-3:
    image: docker.io/bitnami/spark:3
    hostname: worker2
    user: root
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://10.0.1.2:7077
      - SPARK_WORKER_MEMORY=24G
      - SPARK_WORKER_CORES=8
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
      - SPARK_USER=spark
      - SPARK_PASSWORD=Ya_17jMca9ud
    volumes:
      - /export/spark:/export/spark
    ports:
      - '8084:8081'
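Start the remote worker the same way on 10.0.1.4; whether it joined the cluster can be checked from its logs (the "registered with master" message is what a Spark worker prints on success; the exact compose project name may differ, so treat this as a sketch):
cd /export/vm/spark
docker-compose up -d
docker-compose logs -f | grep -i "registered with master"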
- Configure nginx forwarding on 10.0.1.2
[root@VM-1-2-centos conf.d]# cat spark.conf
server {
    listen 18080;
    server_name xxxxxxx;

    location / {
        proxy_pass http://127.0.0.1:8083;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # add username/password authentication
    auth_basic "Restricted Access";
    auth_basic_user_file /etc/nginx/.htpasswd;
}
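The /etc/nginx/.htpasswd file referenced above has to exist before nginx is reloaded. A minimal sketch using the htpasswd tool from httpd-tools (the username spark here is only an example):
yum install -y httpd-tools
htpasswd -c /etc/nginx/.htpasswd spark
nginx -t && nginx -s reload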
- View node status in the web UI
http://xxxxxxx:18080/
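To confirm the cluster actually accepts jobs, a SparkPi smoke test can be submitted from inside the master container. The container name (spark_spark_1, the default compose name for the spark service in the spark directory) and the examples jar path follow the bitnami/spark image layout and are assumptions that may need adjusting:
docker exec -it spark_spark_1 bash -c \
  "/opt/bitnami/spark/bin/spark-submit \
     --master spark://master:7077 \
     --class org.apache.spark.examples.SparkPi \
     /opt/bitnami/spark/examples/jars/spark-examples_*.jar 100"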
From: https://www.cnblogs.com/lixinliang/p/18184155