-
1 元数据问题 set spark.sql.hive.convertMetastoreParquet=true; -- 改成直接使用spark的元数据(注意配置项大小写: convertMetastoreParquet)
-
2 spark直接写路径insert overwrite directory 'path' stored as parquet => insert overwrite directory 'path' using parquet
-
3 读写同一个表
a. 有rename的写法(直接overwrite读取中的同一张表, 会触发 "Cannot overwrite a path that is also being read from" 报错)
-- Pattern a.: OVERWRITE into the same table that the SELECT reads from.
-- Shown here as the problematic form (see the error note below); the target
-- and source partitions differ (dt='20211118' vs dt='20211117').
INSERT OVERWRITE TABLE table_name PARTITION (dt = '20211118')
SELECT
    aa.gazj,
    aa.cell_id
FROM table_name AS aa
WHERE aa.dt = '20211117';
b. 改进的写法(变相的实现overwrite功能)
-- Pattern b.: emulate per-partition OVERWRITE by dropping the target
-- partition first, then appending with INSERT INTO.
-- IF EXISTS makes the script idempotent: without it, the ALTER fails on the
-- very first run, when partition dt='20211118' does not exist yet.
ALTER TABLE table_name DROP IF EXISTS PARTITION (dt = '20211118');

INSERT INTO TABLE table_name PARTITION (dt = '20211118')
SELECT
    aa.gazj,
    aa.cell_id
FROM table_name AS aa
WHERE aa.dt = '20211117';
注意: 不能读写同一个分区;
-
99 关键词
1 spark写hive表慢
2 Error in query: Cannot overwrite a path that is also being read from.
读写同一个表overwrite不行, 就改成into方式
-
-
-



