
Commit 55976af

Replace sequence concatenation by unpacking in Airflow providers (#33933)
1 parent f63a94d commit 55976af

13 files changed: 19 additions & 29 deletions

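Every change in this commit applies the same idiom: instead of concatenating sequences with +, elements are unpacked directly into a new list or tuple literal with the * operator (PEP 448). A minimal sketch of the two spellings, using invented names rather than anything from the diff:

# Invented example values; only the pattern matters.
base_options = ["--verbose", "--retry"]

# Concatenation: builds the right-hand list, then joins the two
# lists into a third.
cmd_old = base_options + ["--output=json"]

# Unpacking: splices the elements straight into one new literal.
# The source can be any iterable, not just another list.
cmd_new = [*base_options, "--output=json"]

assert cmd_old == cmd_new == ["--verbose", "--retry", "--output=json"]

Both spellings produce a new list; the unpacked form reads as "a list made of these parts" and, as the diffs below show, it also removes the need for list(...) and tuple(...) conversions when the operands have different types.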

airflow/providers/amazon/aws/transfers/redshift_to_s3.py

Lines changed: 1 addition & 3 deletions

@@ -131,9 +131,7 @@ def __init__(
         )

         if self.include_header and "HEADER" not in [uo.upper().strip() for uo in self.unload_options]:
-            self.unload_options = list(self.unload_options) + [
-                "HEADER",
-            ]
+            self.unload_options = [*self.unload_options, "HEADER"]

         if self.redshift_data_api_kwargs:
             for arg in ["sql", "parameters"]:
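The removed lines converted self.unload_options with list(...) before concatenating, because + requires both operands to be the same sequence type, while the starred form accepts any iterable. A sketch with an invented value standing in for self.unload_options:

# unload_options may arrive as a tuple rather than a list.
unload_options = ("PARALLEL OFF",)

# Concatenation would raise:
# unload_options + ["HEADER"]  # TypeError: can only concatenate tuple (not "list") to tuple

# Unpacking needs no conversion and always yields a list here.
assert [*unload_options, "HEADER"] == ["PARALLEL OFF", "HEADER"]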

airflow/providers/apache/beam/hooks/beam.py

Lines changed: 3 additions & 7 deletions

@@ -188,9 +188,7 @@ def _start_pipeline(
         process_line_callback: Callable[[str], None] | None = None,
         working_directory: str | None = None,
     ) -> None:
-        cmd = command_prefix + [
-            f"--runner={self.runner}",
-        ]
+        cmd = [*command_prefix, f"--runner={self.runner}"]
         if variables:
             cmd.extend(beam_options_to_args(variables))
         run_beam_command(
@@ -261,7 +259,7 @@ def start_python_pipeline(
                 requirements=py_requirements,
             )

-        command_prefix = [py_interpreter] + py_options + [py_file]
+        command_prefix = [py_interpreter, *py_options, py_file]

         beam_version = (
             subprocess.check_output(
@@ -506,9 +504,7 @@ async def start_pipeline_async(
         command_prefix: list[str],
         working_directory: str | None = None,
     ) -> int:
-        cmd = command_prefix + [
-            f"--runner={self.runner}",
-        ]
+        cmd = [*command_prefix, f"--runner={self.runner}"]
         if variables:
             cmd.extend(beam_options_to_args(variables))
         return await self.run_beam_command_async(
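Unpacking also splices a variable-length sequence into the middle of a fixed command line, which concatenation can only express by chaining several + operations. A sketch with invented values for the names used above:

py_interpreter = "python3"          # invented stand-ins
py_options = ["-m"]
py_file = "my_pipeline.py"

# One literal read left to right, instead of [a] + opts + [b].
command_prefix = [py_interpreter, *py_options, py_file]
assert command_prefix == ["python3", "-m", "my_pipeline.py"]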

airflow/providers/apache/hive/hooks/hive.py

Lines changed: 1 addition & 1 deletion

@@ -163,7 +163,7 @@ def _prepare_cli_cmd(self) -> list[Any]:

         hive_params_list = self.hive_cli_params.split()

-        return [hive_bin] + cmd_extra + hive_params_list
+        return [hive_bin, *cmd_extra, *hive_params_list]

     def _validate_beeline_parameters(self, conn):
         if ":" in conn.host or "/" in conn.host or ";" in conn.host:

airflow/providers/apache/pig/hooks/pig.py

Lines changed: 1 addition & 1 deletion

@@ -79,7 +79,7 @@ def run_cli(self, pig: str, pig_opts: str | None = None, verbose: bool = True) -
                     pig_opts_list = pig_opts.split()
                     pig_cmd.extend(pig_opts_list)

-                pig_cmd.extend(["-f", fname] + cmd_extra)
+                pig_cmd.extend(["-f", fname, *cmd_extra])

                 if verbose:
                     self.log.info("%s", " ".join(pig_cmd))

airflow/providers/cncf/kubernetes/pod_generator.py

Lines changed: 4 additions & 3 deletions

@@ -345,9 +345,10 @@ def reconcile_containers(
         client_container = extend_object_field(base_container, client_container, "volume_devices")
         client_container = merge_objects(base_container, client_container)

-        return [client_container] + PodGenerator.reconcile_containers(
-            base_containers[1:], client_containers[1:]
-        )
+        return [
+            client_container,
+            *PodGenerator.reconcile_containers(base_containers[1:], client_containers[1:]),
+        ]

     @classmethod
     def construct_pod(
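Here the unpacked element is the result of a recursive call, so the new return reads as "the head container followed by the reconciled tail". A self-contained toy with the same head-plus-recursion shape (not the PodGenerator API):

def pair_sums(xs: list[int], ys: list[int]) -> list[int]:
    # Head element followed by the unpacked recursive result,
    # instead of [head] + pair_sums(...).
    if not xs or not ys:
        return []
    return [xs[0] + ys[0], *pair_sums(xs[1:], ys[1:])]

assert pair_sums([1, 2], [10, 20]) == [11, 22]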

airflow/providers/databricks/hooks/databricks_sql.py

Lines changed: 1 addition & 1 deletion

@@ -83,7 +83,7 @@ def __init__(

     def _get_extra_config(self) -> dict[str, Any | None]:
         extra_params = copy(self.databricks_conn.extra_dejson)
-        for arg in ["http_path", "session_configuration"] + self.extra_parameters:
+        for arg in ["http_path", "session_configuration", *self.extra_parameters]:
             if arg in extra_params:
                 del extra_params[arg]
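Because the starred form accepts any iterable, the loop no longer depends on self.extra_parameters being a list. A sketch with invented values:

extra_parameters = ("catalog", "schema")   # a tuple works too

extra_params = {"http_path": "/sql/1.0", "catalog": "main", "other": 1}
for arg in ["http_path", "session_configuration", *extra_parameters]:
    if arg in extra_params:
        del extra_params[arg]

assert extra_params == {"other": 1}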

airflow/providers/docker/operators/docker.py

Lines changed: 1 addition & 1 deletion

@@ -335,7 +335,7 @@ def _run_image(self) -> list[str] | str | None:
         with TemporaryDirectory(prefix="airflowtmp", dir=self.host_tmp_dir) as host_tmp_dir_generated:
             tmp_mount = Mount(self.tmp_dir, host_tmp_dir_generated, "bind")
             try:
-                return self._run_image_with_mounts(self.mounts + [tmp_mount], add_tmp_variable=True)
+                return self._run_image_with_mounts([*self.mounts, tmp_mount], add_tmp_variable=True)
             except APIError as e:
                 if host_tmp_dir_generated in str(e):
                     self.log.warning(

airflow/providers/elasticsearch/log/es_task_handler.py

Lines changed: 1 addition & 1 deletion

@@ -364,7 +364,7 @@ def set_context(self, ti: TaskInstance) -> None:
         if self.json_format:
             self.formatter = ElasticsearchJSONFormatter(
                 fmt=self.formatter._fmt,
-                json_fields=self.json_fields + [self.offset_field],
+                json_fields=[*self.json_fields, self.offset_field],
                 extras={
                     "dag_id": str(ti.dag_id),
                     "task_id": str(ti.task_id),

airflow/providers/google/cloud/operators/datafusion.py

Lines changed: 1 addition & 1 deletion

@@ -783,7 +783,7 @@ def __init__(
         if success_states:
             self.success_states = success_states
         else:
-            self.success_states = SUCCESS_STATES + [PipelineStates.RUNNING]
+            self.success_states = [*SUCCESS_STATES, PipelineStates.RUNNING]

     def execute(self, context: Context) -> str:
         hook = DataFusionHook(

airflow/providers/google/cloud/transfers/bigquery_to_mssql.py

Lines changed: 2 additions & 1 deletion

@@ -45,7 +45,8 @@ class BigQueryToMsSqlOperator(BigQueryToSqlBaseOperator):
     :param mssql_conn_id: reference to a specific mssql hook
     """

-    template_fields: Sequence[str] = tuple(BigQueryToSqlBaseOperator.template_fields) + (
+    template_fields: Sequence[str] = (
+        *BigQueryToSqlBaseOperator.template_fields,
         "source_project_dataset_table",
     )
     operator_extra_links = (BigQueryTableLink(),)
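The idiom works for tuples as well: unpacking the base operator's template_fields into a new tuple literal replaces the tuple(...) + (...) conversion. A sketch with hypothetical classes in place of the real operators:

from typing import Sequence

class BaseOp:
    template_fields: Sequence[str] = ("sql", "destination")

class ChildOp(BaseOp):
    # Extend the parent's fields in a single tuple literal.
    template_fields: Sequence[str] = (
        *BaseOp.template_fields,
        "source_project_dataset_table",
    )

assert ChildOp.template_fields == ("sql", "destination", "source_project_dataset_table")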
