If the BigQuery job's state shows as completed but no rows were inserted when running it through Airflow, it may be due to one of the following reasons. First, check whether the data source is actually empty:
from google.cloud import bigquery

def check_data_exists(project_id, dataset_id, table_id):
    # Fetch the table metadata and check its row count
    client = bigquery.Client(project=project_id)
    table = client.get_table(f'{project_id}.{dataset_id}.{table_id}')
    return table.num_rows > 0

# Check whether the data source contains any rows
data_exists = check_data_exists('your_project_id', 'your_dataset_id', 'your_table_id')
if data_exists:
    print("The data source is not empty")
else:
    print("The data source is empty")
Next, run the query job that writes to the destination table and check its final state:

from google.cloud import bigquery

def run_bigquery_job(project_id, dataset_id, table_id, query):
    client = bigquery.Client(project=project_id)
    job_config = bigquery.QueryJobConfig(
        destination=f'{project_id}.{dataset_id}.{table_id}',
        # By default a query job only writes to an empty destination table;
        # WRITE_APPEND adds the result rows to an existing table instead.
        write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
    )
    job = client.query(query, job_config=job_config)
    job.result()  # Wait for the job to finish
    return job.state

# Run the BigQuery job
query = '''
SELECT * FROM `your_project_id.your_dataset_id.your_table_id`
'''
job_state = run_bigquery_job('your_project_id', 'your_dataset_id', 'your_table_id', query)
print(f"BigQuery job state: {job_state}")
Make sure your query statement is correct; you can test it in the BigQuery console first.
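You can also validate the query programmatically with a dry run, which checks the SQL without executing it or incurring cost. A small sketch, assuming the same client setup as above:

from google.cloud import bigquery

def dry_run_query(project_id, query):
    # A dry run validates the SQL and estimates cost without running the query
    client = bigquery.Client(project=project_id)
    job_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False)
    job = client.query(query, job_config=job_config)  # raises an error if the SQL is invalid
    print(f"Query is valid; it would process {job.total_bytes_processed} bytes")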
To wire both checks into Airflow, you can run them as PythonOperator tasks in a DAG:

from airflow import DAG
from airflow.operators.python import PythonOperator
from datetime import datetime
from google.cloud import bigquery

def check_data_exists(project_id, dataset_id, table_id):
    client = bigquery.Client(project=project_id)
    table = client.get_table(f'{project_id}.{dataset_id}.{table_id}')
    return table.num_rows > 0

def run_bigquery_job(project_id, dataset_id, table_id, query):
    client = bigquery.Client(project=project_id)
    job_config = bigquery.QueryJobConfig(
        destination=f'{project_id}.{dataset_id}.{table_id}',
        write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
    )
    job = client.query(query, job_config=job_config)
    job.result()  # Wait for the job to finish
    return job.state

def check_data_source():
    data_exists = check_data_exists('your_project_id', 'your_dataset_id', 'your_table_id')
    if data_exists:
        print("The data source is not empty")
    else:
        print("The data source is empty")

def run_bigquery():
    query = '''
    SELECT * FROM `your_project_id.your_dataset_id.your_table_id`
    '''
    job_state = run_bigquery_job('your_project_id', 'your_dataset_id', 'your_table_id', query)
    print(f"BigQuery job state: {job_state}")

with DAG('example_dag', start_date=datetime(2021, 1, 1), schedule_interval='@once') as dag:
    check_data_source_task = PythonOperator(
        task_id='check_data_source',
        python_callable=check_data_source,
    )
    run_bigquery_task = PythonOperator(
        task_id='run_bigquery_task',
        # The callable is named run_bigquery so it does not share a name with this task variable
        python_callable=run_bigquery,
    )
    check_data_source_task >> run_bigquery_task
Make sure to adjust and modify this to fit your needs.
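As an alternative to wrapping the client library in a PythonOperator, the Google provider package for Airflow offers BigQueryInsertJobOperator, which submits the query job and fails the Airflow task if the job errors. This is only a sketch, assuming apache-airflow-providers-google is installed and using the same hypothetical table IDs:

from datetime import datetime
from airflow import DAG
from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

with DAG('example_dag_insert_job', start_date=datetime(2021, 1, 1), schedule_interval='@once') as dag:
    insert_job = BigQueryInsertJobOperator(
        task_id='insert_job',
        configuration={
            'query': {
                'query': 'SELECT * FROM `your_project_id.your_dataset_id.your_table_id`',
                'useLegacySql': False,
                'destinationTable': {
                    'projectId': 'your_project_id',
                    'datasetId': 'your_dataset_id',
                    'tableId': 'your_table_id',
                },
                # Append to an existing destination table instead of requiring it to be empty
                'writeDisposition': 'WRITE_APPEND',
            }
        },
    )

The configuration dict follows the BigQuery Jobs REST resource, so destinationTable and writeDisposition are set the same way as in the client-library example above.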
These are common causes, and ways to diagnose them, when a BigQuery job reports a completed state but no rows have been inserted. You can adapt the snippets above to your specific situation.