请尝试以下可正常运行的示例:
from datalab.context import Contextimport google.datalab.storage as storageimport google.datalab.bigquery as bqimport pandas as pd# Dataframe to writesimple_dataframe = pd.Dataframe(data=[{1,2,3},{4,5,6}],columns=['a','b','c'])sample_bucket_name = Context.default().project_id + '-datalab-example'sample_bucket_path = 'gs://' + sample_bucket_namesample_bucket_object = sample_bucket_path + '/Hello.txt'bigquery_dataset_name = 'TestDataSet'bigquery_table_name = 'TestTable'# Define storage bucketsample_bucket = storage.Bucket(sample_bucket_name)# Create storage bucket if it does not existif not sample_bucket.exists(): sample_bucket.create()# Define BigQuery dataset and tabledataset = bq.Dataset(bigquery_dataset_name)table = bq.Table(bigquery_dataset_name + '.' + bigquery_table_name)# Create BigQuery datasetif not dataset.exists(): dataset.create()# Create or overwrite the existing table if it existstable_schema = bq.Schema.from_data(simple_dataframe)table.create(schema = table_schema, overwrite = True)# Write the Dataframe to GCS (Google Cloud Storage)%storage write --variable simple_dataframe --object $sample_bucket_object# Write the Dataframe to a BigQuery tabletable.insert(simple_dataframe)我使用了这个示例,并将datalab
GitHub 仓库中的 _table.py 文件作为参考。其他源代码文件也可以在 datalab 的 GitHub 仓库中找到。



