
Commit f649905

fix docstring typos (#15392)
1 parent 54adc44 commit f649905

1 file changed (+11 −11 lines)


airflow/providers/google/cloud/operators/dataflow.py

Lines changed: 11 additions & 11 deletions
@@ -53,7 +53,7 @@ class DataflowConfiguration:
     :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` and
     :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`.

-    :param job_name: The 'jobName' to use when executing the DataFlow job
+    :param job_name: The 'jobName' to use when executing the Dataflow job
         (templated). This ends up being set in the pipeline options, so any entry
         with key ``'jobName'`` or ``'job_name'``in ``options`` will be overwritten.
     :type job_name: str
@@ -173,7 +173,7 @@ def __init__(
 # pylint: disable=too-many-instance-attributes
 class DataflowCreateJavaJobOperator(BaseOperator):
     """
-    Start a Java Cloud DataFlow batch job. The parameters of the operation
+    Start a Java Cloud Dataflow batch job. The parameters of the operation
     will be passed to the job.

     This class is deprecated.
@@ -200,7 +200,7 @@ class DataflowCreateJavaJobOperator(BaseOperator):

         dag = DAG('test-dag', default_args=default_args)

-        task = DataFlowJavaOperator(
+        task = DataflowCreateJavaJobOperator(
             gcp_conn_id='gcp_default',
             task_id='normalize-cal',
             jar='{{var.value.gcp_dataflow_base}}pipeline-ingress-cal-normalize-1.0.jar',
@@ -221,9 +221,9 @@ class DataflowCreateJavaJobOperator(BaseOperator):
     For more information on how to use this operator, take a look at the guide:
     :ref:`howto/operator:DataflowCreateJavaJobOperator`

-    :param jar: The reference to a self executing DataFlow jar (templated).
+    :param jar: The reference to a self executing Dataflow jar (templated).
     :type jar: str
-    :param job_name: The 'jobName' to use when executing the DataFlow job
+    :param job_name: The 'jobName' to use when executing the Dataflow job
         (templated). This ends up being set in the pipeline options, so any entry
         with key ``'jobName'`` in ``options`` will be overwritten.
     :type job_name: str
@@ -330,7 +330,7 @@ class DataflowCreateJavaJobOperator(BaseOperator):

     .. code-block:: python

-        t1 = DataFlowJavaOperator(
+        t1 = DataflowCreateJavaJobOperator(
             task_id='dataflow_example',
             jar='{{var.value.gcp_dataflow_base}}pipeline/build/libs/pipeline-example-1.0.jar',
             options={
@@ -481,16 +481,16 @@ def on_kill(self) -> None:
 # pylint: disable=too-many-instance-attributes
 class DataflowTemplatedJobStartOperator(BaseOperator):
     """
-    Start a Templated Cloud DataFlow job. The parameters of the operation
+    Start a Templated Cloud Dataflow job. The parameters of the operation
     will be passed to the job.

     .. seealso::
         For more information on how to use this operator, take a look at the guide:
         :ref:`howto/operator:DataflowTemplatedJobStartOperator`

-    :param template: The reference to the DataFlow template.
+    :param template: The reference to the Dataflow template.
     :type template: str
-    :param job_name: The 'jobName' to use when executing the DataFlow template
+    :param job_name: The 'jobName' to use when executing the Dataflow template
         (templated).
     :type job_name: Optional[str]
     :param options: Map of job runtime environment options.
@@ -598,7 +598,7 @@ class DataflowTemplatedJobStartOperator(BaseOperator):

     .. code-block:: python

-        t1 = DataflowTemplateOperator(
+        t1 = DataflowTemplatedJobStartOperator(
             task_id='dataflow_example',
             template='{{var.value.gcp_dataflow_base}}',
             parameters={
@@ -954,7 +954,7 @@ class DataflowCreatePythonJobOperator(BaseOperator):
     :param py_file: Reference to the python dataflow pipeline file.py, e.g.,
         /some/local/file/path/to/your/python/pipeline/file. (templated)
     :type py_file: str
-    :param job_name: The 'job_name' to use when executing the DataFlow job
+    :param job_name: The 'job_name' to use when executing the Dataflow job
         (templated). This ends up being set in the pipeline options, so any entry
         with key ``'jobName'`` or ``'job_name'`` in ``options`` will be overwritten.
     :type job_name: str
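
Beyond the casing fixes, this diff also corrects the docstring snippets to use the current operator names (DataFlowJavaOperator → DataflowCreateJavaJobOperator, DataflowTemplateOperator → DataflowTemplatedJobStartOperator). As a minimal sketch of what the corrected names look like in a real DAG (not part of this commit; the DAG id, GCS paths, and region below are hypothetical placeholders):

# Sketch only: demonstrates the corrected operator name
# DataflowTemplatedJobStartOperator in a minimal DAG.
from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.dataflow import (
    DataflowTemplatedJobStartOperator,
)

with DAG(
    dag_id="example_dataflow_template",  # hypothetical DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
    catchup=False,
) as dag:
    # 'job_name' is templated, as the fixed docstrings note, so a run-specific
    # suffix such as {{ ds_nodash }} is rendered at execution time.
    start_template_job = DataflowTemplatedJobStartOperator(
        task_id="start_template_job",
        template="gs://dataflow-templates/latest/Word_Count",  # assumed sample template path
        job_name="wordcount-{{ ds_nodash }}",
        parameters={
            "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
            "output": "gs://your-bucket/wordcount/output",  # hypothetical bucket
        },
        location="us-central1",  # assumed region
    )

Note that DataflowCreateJavaJobOperator itself is marked "This class is deprecated." in the diff above; per the DataflowConfiguration docstring in the first hunk, new DAGs would pass a DataflowConfiguration to BeamRunJavaPipelineOperator instead.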
