@@ -53,7 +53,7 @@ class DataflowConfiguration:
     :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` and
     :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`.
 
-    :param job_name: The 'jobName' to use when executing the DataFlow job
+    :param job_name: The 'jobName' to use when executing the Dataflow job
         (templated). This ends up being set in the pipeline options, so any entry
         with key ``'jobName'`` or ``'job_name'`` in ``options`` will be overwritten.
     :type job_name: str
@@ -173,7 +173,7 @@ def __init__(
 # pylint: disable=too-many-instance-attributes
 class DataflowCreateJavaJobOperator(BaseOperator):
     """
-    Start a Java Cloud DataFlow batch job. The parameters of the operation
+    Start a Java Cloud Dataflow batch job. The parameters of the operation
     will be passed to the job.
 
     This class is deprecated.
@@ -200,7 +200,7 @@ class DataflowCreateJavaJobOperator(BaseOperator):
 
        dag = DAG('test-dag', default_args=default_args)
 
-       task = DataFlowJavaOperator(
+       task = DataflowCreateJavaJobOperator(
            gcp_conn_id='gcp_default',
            task_id='normalize-cal',
            jar='{{var.value.gcp_dataflow_base}}pipeline-ingress-cal-normalize-1.0.jar',
@@ -221,9 +221,9 @@ class DataflowCreateJavaJobOperator(BaseOperator):
         For more information on how to use this operator, take a look at the guide:
         :ref:`howto/operator:DataflowCreateJavaJobOperator`
 
-    :param jar: The reference to a self-executing DataFlow jar (templated).
+    :param jar: The reference to a self-executing Dataflow jar (templated).
     :type jar: str
-    :param job_name: The 'jobName' to use when executing the DataFlow job
+    :param job_name: The 'jobName' to use when executing the Dataflow job
         (templated). This ends up being set in the pipeline options, so any entry
         with key ``'jobName'`` in ``options`` will be overwritten.
     :type job_name: str
@@ -330,7 +330,7 @@ class DataflowCreateJavaJobOperator(BaseOperator):
 
    .. code-block:: python
 
-       t1 = DataFlowJavaOperator(
+       t1 = DataflowCreateJavaJobOperator(
           task_id='dataflow_example',
           jar='{{var.value.gcp_dataflow_base}}pipeline/build/libs/pipeline-example-1.0.jar',
           options={
@@ -481,16 +481,16 @@ def on_kill(self) -> None:
 # pylint: disable=too-many-instance-attributes
 class DataflowTemplatedJobStartOperator(BaseOperator):
     """
-    Start a Templated Cloud DataFlow job. The parameters of the operation
+    Start a Templated Cloud Dataflow job. The parameters of the operation
     will be passed to the job.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
         :ref:`howto/operator:DataflowTemplatedJobStartOperator`
 
-    :param template: The reference to the DataFlow template.
+    :param template: The reference to the Dataflow template.
     :type template: str
-    :param job_name: The 'jobName' to use when executing the DataFlow template
+    :param job_name: The 'jobName' to use when executing the Dataflow template
         (templated).
     :type job_name: Optional[str]
     :param options: Map of job runtime environment options.
@@ -598,7 +598,7 @@ class DataflowTemplatedJobStartOperator(BaseOperator):
 
    .. code-block:: python
 
-       t1 = DataflowTemplateOperator(
+       t1 = DataflowTemplatedJobStartOperator(
           task_id='dataflow_example',
           template='{{var.value.gcp_dataflow_base}}',
           parameters={
@@ -954,7 +954,7 @@ class DataflowCreatePythonJobOperator(BaseOperator):
     :param py_file: Reference to the python dataflow pipeline file.py, e.g.,
         /some/local/file/path/to/your/python/pipeline/file. (templated)
     :type py_file: str
-    :param job_name: The 'job_name' to use when executing the DataFlow job
+    :param job_name: The 'job_name' to use when executing the Dataflow job
         (templated). This ends up being set in the pipeline options, so any entry
         with key ``'jobName'`` or ``'job_name'`` in ``options`` will be overwritten.
     :type job_name: str
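Beyond the "DataFlow" → "Dataflow" branding fixes, the hunks above update the docstring examples to use the current operator class names (`DataflowCreateJavaJobOperator`, `DataflowTemplatedJobStartOperator`) instead of the deprecated `DataFlowJavaOperator` and `DataflowTemplateOperator` aliases. A minimal sketch of a DAG invoking the renamed operators, assuming Airflow 2.x with the Google provider installed; the connection, bucket paths, and job names below are illustrative placeholders, not values from this commit:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.dataflow import (
    DataflowCreateJavaJobOperator,
    DataflowTemplatedJobStartOperator,
)

with DAG(
    dag_id="dataflow_operators_example",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    # Current name for what the old docstrings called DataFlowJavaOperator.
    java_job = DataflowCreateJavaJobOperator(
        task_id="java_pipeline",
        jar="gs://example-bucket/pipeline-example-1.0.jar",  # placeholder path
        job_name="example-java-job",  # ends up as 'jobName' in the pipeline options
        options={"output": "gs://example-bucket/output"},
    )

    # Current name for what the old docstrings called DataflowTemplateOperator.
    templated_job = DataflowTemplatedJobStartOperator(
        task_id="templated_pipeline",
        template="gs://dataflow-templates/latest/Word_Count",  # placeholder template
        job_name="example-templated-job",
        parameters={"inputFile": "gs://example-bucket/input.txt"},
    )

    java_job >> templated_job
```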