查询表的分区情况
程序 - Jupyter
# 导入信息
from pyspark.sql import SparkSession, Row
from pyspark import SQLContext
from pyspark.sql.functions import udf, col, explode, collect_set, get_json_object, concat_ws, split
from pyspark.sql.types import StringType, IntegerType, StructType, StructField, ArrayType, MapType
# from offline_verification_func import *
# Build a local SparkSession with Hive support.
# NOTE: the builder chain must be one expression — wrapped in parentheses
# here because bare-newline method chaining is a SyntaxError in Python.
spark = (
    SparkSession.builder.master("local[50]")
    .config("spark.executor.memory", "10g")
    .config("spark.driver.memory", "20g")
    .config("spark.driver.maxResultSize", "4g")
    .appName("test")
    .enableHiveSupport()
    .getOrCreate()
)
# Query: list the table's partitions (replace 表名 with the actual table name)
spark.sql("""
show partitions 表名
""").show()
- Hive 中
# 显示表分区: hive> show partitions table_name;
- 数据库（SQL 客户端）中
show partitions table_name;



