The cause of the problem is that Airflow's Dataproc serverless job operator is not designed to accept Python parameters. We can work around this with a custom operator: pass the main class and its parameters to the operator when the task is defined, and have the operator attach them to the Dataproc job as arguments before submitting it.
Here is a code example:
import time

import airflow
from airflow.exceptions import AirflowException
from airflow.models.baseoperator import BaseOperator
from google.cloud.dataproc_v1 import Job, JobServiceClient, JobStatus


class CustomServerlessJobOperator(BaseOperator):
    def __init__(
        self,
        *,
        main_class,
        main_jar,
        dataproc_cluster,
        project_id=None,
        region=None,
        data_proc_shared_package=None,
        properties=None,
        parameters=None,
        **kwargs,
    ):
        # apply_defaults is deprecated in Airflow 2; a plain __init__ is enough.
        super().__init__(**kwargs)
        self.main_class = main_class
        self.main_jar = main_jar
        self.dataproc_cluster = dataproc_cluster
        self.project_id = project_id or self.dag.default_args.get("project_id")
        self.region = region or self.dag.default_args.get("region")
        self.data_proc_shared_package = (
            data_proc_shared_package or "com.google.cloud.spark"
        )
        self.properties = properties or {}
        # The Python-side parameters to forward to the job,
        # e.g. ["--date", "2023-01-01"].
        self.parameters = parameters or []

    def execute(self, context):
        # Dataproc is a regional service, so point the client at the
        # regional endpoint that matches the cluster.
        job_client = JobServiceClient(
            client_options={
                "api_endpoint": f"{self.region}-dataproc.googleapis.com:443"
            }
        )
        job = Job()
        # The placement only names the target cluster; project and region
        # are passed to submit_job below.
        job.placement.cluster_name = self.dataproc_cluster
        job.spark_job.main_class = self.main_class
        job.spark_job.main_jar_file_uri = self.main_jar
        # Dataproc label values may not contain dots, so sanitize the version.
        job.labels["airflow-version"] = (
            "v" + airflow.__version__.replace(".", "-").replace("+", "-")
        )
        job.spark_job.properties["spark.jars.packages"] = self.data_proc_shared_package
        job.spark_job.properties.update(self.properties)
        # Forward the parameters: they become the argv of the Spark main class.
        job.spark_job.args.extend(self.parameters)
        self.log.info("Submitting Dataproc job:\n%s", Job.to_json(job))
        job_id = job_client.submit_job(
            project_id=self.project_id,
            region=self.region,
            job=job,
        ).reference.job_id
        return self.wait_for_done(job_id, job_client)

    def wait_for_done(self, job_id, job_client):
        # Poll until the job reaches a terminal state.
        while True:
            job = job_client.get_job(
                project_id=self.project_id, region=self.region, job_id=job_id
            )
            state = job.status.state
            if state == JobStatus.State.DONE:
                return job_id
            if state in (JobStatus.State.ERROR, JobStatus.State.CANCELLED):
                raise AirflowException(
                    f"Dataproc job {job_id} ended in state {state.name}"
                )
            time.sleep(30)
Usage example (the resource names below are placeholders):

run_spark_job = CustomServerlessJobOperator(
    task_id="run_spark_job",
    main_class="com.example.MySparkApp",
    main_jar="gs://my-bucket/jars/my-spark-app.jar",
    dataproc_cluster="my-cluster",
    project_id="my-project",
    region="us-central1",
    parameters=["--date", "2023-01-01"],
)
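Because project_id and region fall back to self.dag.default_args, they can also be supplied once at the DAG level instead of on every task. A minimal sketch, assuming hypothetical DAG and resource names:

from datetime import datetime

from airflow import DAG

with DAG(
    dag_id="custom_dataproc_job",  # hypothetical DAG id
    start_date=datetime(2023, 1, 1),
    schedule_interval=None,
    default_args={
        "project_id": "my-project",  # picked up by the operator
        "region": "us-central1",
    },
) as dag:
    CustomServerlessJobOperator(
        task_id="run_spark_job",
        main_class="com.example.MySparkApp",
        main_jar="gs://my-bucket/jars/my-spark-app.jar",
        dataproc_cluster="my-cluster",
        parameters=["--date", "2023-01-01"],
    )

The parameters end up in job.spark_job.args, so the Spark main class receives them as ordinary command-line arguments.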