from pyspark.sql import SparkSession

# Build a local SparkSession with an enlarged driver heap.
# Adjust spark.driver.memory to fit the actual environment: the default
# driver heap (1g) is often too small and triggers
# java.lang.OutOfMemoryError: Java heap space on larger workloads.
# NOTE: the chained builder calls must be wrapped in parentheses (or use
# trailing backslashes) — the original multi-line chain without them is a
# SyntaxError.
spark = (
    SparkSession.builder
    .master('local[*]')
    .config("spark.driver.memory", "15g")
    .appName('my-cool-app')
    .getOrCreate()
)
# References:
# https://stackoverflow.com/questions/32336915/pyspark-java-lang-outofmemoryerror-java-heap-space



