- 首先,需要安装airflow和psycopg2。可以使用以下命令进行安装:
pip install apache-airflow
pip install psycopg2
- 然后,在Airflow中创建一个DAG(有向无环图),以读取CSV文件和将其加载到PostgreSQL中。以下是一个示例DAG:
from airflow import DAG
from datetime import datetime
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
import pandas as pd
import psycopg2
# Default arguments applied to every task created under this DAG.
default_args = {
    'owner': 'airflow',
    'start_date': datetime(2021, 1, 1),
}

# Manually-triggered DAG (schedule_interval=None) that copies a CSV file
# into a PostgreSQL table.
dag = DAG(
    dag_id='csv_to_postgres',
    default_args=default_args,
    schedule_interval=None,
)
def load_csv_to_postgres(csv_path='data.csv', **kwargs):
    """Read rows from *csv_path* and insert them into PostgreSQL.

    Parameters
    ----------
    csv_path : str
        Path of the CSV file to load. Defaults to ``'data.csv'`` for
        backward compatibility with the original hard-coded path.
        NOTE(review): the download task writes to /opt/airflow/data.csv;
        this relative default only matches if the worker's cwd is
        /opt/airflow — confirm, or pass the absolute path explicitly.
    **kwargs
        Airflow context keywords. Accepted (and ignored) because the
        PythonOperator that runs this callable sets provide_context=True,
        which would otherwise raise TypeError when the task executes.

    The CSV is expected to contain 'column1', 'column2' and 'column3'.
    """
    # Read the CSV and build the parameter tuples up front, so the
    # database connection is held open as briefly as possible.
    data = pd.read_csv(csv_path)
    rows = [
        (row['column1'], row['column2'], row['column3'])
        for _, row in data.iterrows()
    ]
    # Connect to the database.
    # NOTE(review): credentials are hard-coded; move them into an Airflow
    # Connection or environment variables before production use.
    conn = psycopg2.connect(dbname="postgres", host="localhost", port="5432", user="postgres", password="")
    try:
        # 'with conn' commits on success and rolls back on error;
        # 'with cursor' closes the cursor (the original leaked it and
        # never released the connection on failure).
        with conn:
            with conn.cursor() as cursor:
                # One executemany round replaces the per-row execute loop.
                cursor.executemany("""
INSERT INTO table_name (column1, column2, column3)
VALUES (%s, %s, %s);
""", rows)
    finally:
        # psycopg2's connection context manager does NOT close the
        # connection, so close it explicitly.
        conn.close()
# Download the CSV file onto the Airflow host before loading it.
# NOTE(review): this writes to /opt/airflow/data.csv while
# load_csv_to_postgres reads the relative path 'data.csv' — only
# equivalent if the worker's cwd is /opt/airflow; confirm.
t1 = BashOperator(
    task_id='read_csv',
    bash_command='curl -o /opt/airflow/data.csv https://example.com/data.csv',
    dag=dag
)
# Load the downloaded CSV data into PostgreSQL.
t2 = PythonOperator(
    task_id='load_postgres',
    # provide_context=True makes Airflow pass context kwargs to the callable.
    # NOTE(review): load_csv_to_postgres() as defined above takes no
    # **kwargs, so this raises TypeError when the task runs — confirm the
    # callable's signature accepts the context.
    provide_context=True,
    python_callable=load_csv_to_postgres,
    dag=dag
)
# No-op marker task signalling the end of the DAG.
t3 = DummyOperator(
    task_id='finish',
    dag=dag
)
# Task ordering: download -> load -> finish.
t1 >> t2 >> t3
- 替换DAG中的数据库连接和CSV文件的路径。在
load_csv_to_postgres
函数中,table_name
应该被替换为目标表的名称,column1
,column2
和column3
应该替换为目标表中的列名。提供正确的数据库凭据以连接到