一种方法是分别将 b 和 c 设置为两个数据帧的索引，然后使用 join 后跟 reset_index：
df1.set_index('b').join(df2.set_index('c')).reset_index()

   b  a     d
0  a  0  Alex
1  b  1  Alex
2  c  2  Alex
3  d  3  Alex

在大数据帧上，这将比 merge/drop 的方法更快，主要是因为 drop 很慢。@Bill 的方法比我的建议快，而 @WB 和 @PiRsquared 则轻松超越了其他所有建议：
import timeit

df1 = pd.concat((df1 for _ in range(1000)))
df2 = pd.concat((df2 for _ in range(1000)))

def index_method(df1=df1, df2=df2):
    return df1.set_index('b').join(df2.set_index('c')).reset_index()

def merge_method(df1=df1, df2=df2):
    return df1.merge(df2, left_on='b', right_on='c').drop('c', axis='columns')

def rename_method(df1=df1, df2=df2):
    return df1.rename({'b': 'c'}, axis=1).merge(df2)

def index_method2(df1=df1, df2=df2):
    return df1.join(df2.set_index('c'), on='b')

def assign_method(df1=df1, df2=df2):
    return df1.set_index('b').assign(c=df2.set_index('c').d).reset_index()

def map_method(df1=df1, df2=df2):
    return df1.assign(d=df1.b.map(dict(df2.values)))

>>> timeit.timeit(index_method, number=10) / 10
0.7853091600998596
>>> timeit.timeit(merge_method, number=10) / 10
1.1696729859002517
>>> timeit.timeit(rename_method, number=10) / 10
0.4291436871004407
>>> timeit.timeit(index_method2, number=10) / 10
0.5037374985004135
>>> timeit.timeit(assign_method, number=10) / 10
0.0038641377999738325
>>> timeit.timeit(map_method, number=10) / 10
0.006620216699957382


