配置好文件
配置共享文件夹路径（即 docker-compose.yml 里 volumes 冒号左侧的宿主机路径，需提前建好）
docker-compose.yml文件代码
# Spark standalone cluster: one master and two workers built from the same
# image, all sharing one host folder mounted at /root/spark inside each
# container. Workers reach the master via the legacy `links` mechanism
# (hostname "master" resolves inside worker containers).
version: "2"
services:
  master:
    image: zylctgu/spark2.4
    command: /start-master
    hostname: spark-master
    container_name: spark-master
    volumes:
      # Host share folder : container path — adjust the left side to your machine.
      - /d/documents/docker-files/spark/share_files:/root/spark
    ports:
      - "4040:4040"  # Spark application UI
      - "8080:8080"  # Spark master web UI

  worker1:
    image: zylctgu/spark2.4
    command: /start-worker
    hostname: worker1
    container_name: spark-worker1
    volumes:
      - /d/documents/docker-files/spark/share_files:/root/spark
    ports:
      - "4041:4040"  # this worker's application UI, remapped to avoid clashing with master's 4040
      - "8081:8081"  # worker web UI
    links:
      - master
    environment:
      # Quoted so the YAML parser keeps them as strings for the container env.
      SPARK_WORKER_CORES: "1"
      SPARK_WORKER_MEMORY: "2g"

  worker2:
    image: zylctgu/spark2.4
    command: /start-worker
    hostname: worker2
    container_name: spark-worker2
    volumes:
      - /d/documents/docker-files/spark/share_files:/root/spark
    ports:
      - "4042:4040"  # application UI remap
      - "8082:8081"  # worker web UI remap (container serves on 8081)
    links:
      - master
    environment:
      SPARK_WORKER_CORES: "1"
      SPARK_WORKER_MEMORY: "2g"
根据docker-compose.yml里面的文件路径配好文件
依次执行以下命令：
docker load --input spark.tar
docker-compose up -d
docker ps
docker exec -it spark-master bash
jps
新开一个窗口
进入文件夹里面再运行 pyspark --master spark://spark-master:7077
注意文件夹路径!!!!



