您可以将文件上传到Google Cloud Storage中,然后使用BigQuery进行导入。以下是相关代码示例:
from google.cloud import storage

# Upload a local CSV file to a Google Cloud Storage bucket so BigQuery
# can later import it from a gs:// URI.
storage_client = storage.Client()

bucket_name = "your-bucket-name"
local_file_path = "path/to/local/file.csv"
destination_file_name = "file.csv"

# Resolve the target bucket, create a blob handle for the destination
# object, and upload the local file into it.
# NOTE: the original collapsed these three statements onto one line,
# which is a SyntaxError in Python — they must be separate statements.
bucket = storage_client.bucket(bucket_name)
blob = bucket.blob(destination_file_name)
blob.upload_from_filename(local_file_path)
from google.cloud import bigquery

# Load the CSV uploaded to GCS above into a BigQuery table.
bigquery_client = bigquery.Client()

project_id = "your-project-id"
dataset_id = "your-dataset-id"
table_id = "your-table-id"

# gs:// URI of the object uploaded in the previous step
# (bucket_name / destination_file_name are defined there).
gcs_uri = "gs://{}/{}".format(bucket_name, destination_file_name)

# Configure the load job. The original collapsed these three statements
# onto one line, which is a SyntaxError — they must be separate statements.
job_config = bigquery.LoadJobConfig()
# TODO: replace the placeholder with the table's real schema,
# e.g. [bigquery.SchemaField("name", "STRING"), ...]
job_config.schema = [...]
job_config.write_disposition = "WRITE_TRUNCATE"  # overwrite existing table contents

# Start the load job from the GCS URI into project.dataset.table.
load_job = bigquery_client.load_table_from_uri(
    gcs_uri,
    bigquery.DatasetReference(project_id, dataset_id).table(table_id),
    job_config=job_config,
)
load_job.result()  # block until the load job finishes (raises on failure)