
Commit 4b06fde

Fix Flake8 errors (#8841)
1 parent 1d12c34 commit 4b06fde

14 files changed, +47 -47 lines changed

β€Žairflow/api/common/experimental/mark_tasks.py

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -136,34 +136,34 @@ def set_state(
136136
# Flake and pylint disagree about correct indents here
137137
def all_subdag_tasks_query(sub_dag_run_ids, session, state, confirmed_dates): # noqa: E123
138138
"""Get *all* tasks of the sub dags"""
139-
qry_sub_dag = session.query(TaskInstance).\
139+
qry_sub_dag = session.query(TaskInstance). \
140140
filter(
141-
TaskInstance.dag_id.in_(sub_dag_run_ids),
142-
TaskInstance.execution_date.in_(confirmed_dates) # noqa: E123
143-
).\
141+
TaskInstance.dag_id.in_(sub_dag_run_ids),
142+
TaskInstance.execution_date.in_(confirmed_dates)
143+
). \
144144
filter(
145-
or_(
146-
TaskInstance.state.is_(None),
147-
TaskInstance.state != state
148-
)
149-
) # noqa: E123
145+
or_(
146+
TaskInstance.state.is_(None),
147+
TaskInstance.state != state
148+
)
149+
) # noqa: E123
150150
return qry_sub_dag
151151

152152

153-
def get_all_dag_task_query(dag, session, state, task_ids, confirmed_dates): # noqa: E123
153+
def get_all_dag_task_query(dag, session, state, task_ids, confirmed_dates):
154154
"""Get all tasks of the main dag that will be affected by a state change"""
155-
qry_dag = session.query(TaskInstance).\
155+
qry_dag = session.query(TaskInstance). \
156156
filter(
157-
TaskInstance.dag_id == dag.dag_id,
158-
TaskInstance.execution_date.in_(confirmed_dates),
159-
TaskInstance.task_id.in_(task_ids) # noqa: E123
160-
).\
157+
TaskInstance.dag_id == dag.dag_id,
158+
TaskInstance.execution_date.in_(confirmed_dates),
159+
TaskInstance.task_id.in_(task_ids) # noqa: E123
160+
). \
161161
filter(
162-
or_(
163-
TaskInstance.state.is_(None),
164-
TaskInstance.state != state
165-
)
162+
or_(
163+
TaskInstance.state.is_(None),
164+
TaskInstance.state != state
166165
)
166+
)
167167
return qry_dag
168168

169169

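For reference, the `# noqa: E123` comments in this hunk point at pycodestyle's E123 check ("closing bracket does not match indentation of opening line's bracket"), one of the continuation-line rules flake8 normally keeps disabled unless a project opts in. The snippet below only illustrates how that warning arises and how a line-scoped `noqa` silences it; the values are made up and nothing in it comes from the commit itself.

# Illustration only -- not code from this commit.
# The opening line starts at column 0 but the closing bracket is indented,
# so pycodestyle reports E123 on the closing line (when the check is enabled).
greeting = "{name} joined {channel}".format(
    name="airflow-user",
    channel="#general",
    )  # noqa: E123  suppresses exactly this code, on this line only

# The indentation-only fix that makes both the warning and the noqa unnecessary:
greeting = "{name} joined {channel}".format(
    name="airflow-user",
    channel="#general",
)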
β€Žairflow/jobs/scheduler_job.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -561,12 +561,11 @@ def create_dag_run(self, dag, dag_runs=None, session=None):
561561
# this query should be replaced by find dagrun
562562
qry = (
563563
session.query(func.max(DagRun.execution_date))
564-
.filter_by(dag_id=dag.dag_id)
565-
.filter(or_(
566-
DagRun.external_trigger == False, # noqa: E712 pylint: disable=singleton-comparison
567-
# add % as a wildcard for the like query
568-
DagRun.run_id.like(f"{DagRunType.SCHEDULED.value}__%")
569-
)
564+
.filter_by(dag_id=dag.dag_id)
565+
.filter(or_(
566+
DagRun.external_trigger == False, # noqa: E712 pylint: disable=singleton-comparison
567+
# add % as a wildcard for the like query
568+
DagRun.run_id.like(f"{DagRunType.SCHEDULED.value}__%"))
570569
)
571570
)
572571
last_scheduled_run = qry.scalar()

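The scheduler_job.py query above keeps the whole chain wrapped in a single pair of parentheses, the style that avoids the backslash continuations (and most of the E12x indentation juggling) seen in mark_tasks.py. Below is a small, self-contained sketch of the same pattern against a throwaway SQLite database; the `Task` model, column names, and filter values are invented for illustration, and the imports assume SQLAlchemy 1.4 or newer.

from sqlalchemy import Column, Integer, String, create_engine, or_
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Task(Base):
    """Toy table standing in for TaskInstance/DagRun in the hunks above."""
    __tablename__ = "task"
    id = Column(Integer, primary_key=True)
    state = Column(String, nullable=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# Wrapping the chain in parentheses lets every .filter() start on its own
# line without a trailing backslash, so the E12x continuation-line checks
# have nothing to complain about.
pending = (
    session.query(Task)
    .filter(Task.id.in_([1, 2, 3]))
    .filter(or_(Task.state.is_(None), Task.state != "success"))
    .all()
)
print(pending)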
β€Žairflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@
5656
)
5757

5858
create_table = BigQueryCreateEmptyTableOperator(
59-
task_id=f"create_table",
59+
task_id="create_table",
6060
dataset_id=DATASET_NAME,
6161
table_id=TABLE,
6262
schema_fields=[

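This one-line change, like the similar edits further down (example_presto_to_gcs.py, local_filesystem.py, setup_backport_packages.py, dev/send_email.py), drops an `f` prefix from a string literal that contains no placeholders. Recent pyflakes/flake8 releases report that pattern as F541 ("f-string is missing placeholders"); whether that exact code is what fired here depends on the flake8 version the repository pins, so treat the snippet below as a generic before/after rather than the project's actual lint output.

task_name = "create_table"

# Flagged: the f-prefix does nothing because the literal has no {} placeholders.
task_id = f"create_table"

# Fixed: a plain string literal says the same thing.
task_id = "create_table"

# An f-string is still the right tool once interpolation is actually needed.
log_line = f"task {task_name} created"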
β€Žairflow/providers/google/cloud/example_dags/example_presto_to_gcs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def safe_name(s: str) -> str:
4747
default_args = {"start_date": days_ago(1)}
4848

4949
with models.DAG(
50-
dag_id=f"example_presto_to_gcs",
50+
dag_id="example_presto_to_gcs",
5151
default_args=default_args,
5252
schedule_interval=None, # Override to match your needs
5353
tags=["example"],

β€Žairflow/providers/google/cloud/hooks/functions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ def upload_function_zip(self, location: str, zip_path: str, project_id: str) ->
147147
"""
148148
response = \
149149
self.get_conn().projects().locations().functions().generateUploadUrl( # pylint: disable=no-member # noqa
150-
parent=self._full_location(project_id, location)
150+
parent=self._full_location(project_id, location)
151151
).execute(num_retries=self.num_retries)
152152

153153
upload_url = response.get('uploadUrl')

β€Žairflow/providers/qubole/operators/qubole_check.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -229,10 +229,11 @@ def handle_airflow_exception(airflow_exception, hook):
229229
if cmd.is_success(cmd.status):
230230
qubole_command_results = hook.get_query_results()
231231
qubole_command_id = cmd.id
232-
exception_message = '\nQubole Command Id: {qubole_command_id}' \
233-
'\nQubole Command Results:' \
234-
'\n{qubole_command_results}'.format(
235-
qubole_command_id=qubole_command_id, # noqa: E122
236-
qubole_command_results=qubole_command_results)
232+
exception_message = \
233+
'\nQubole Command Id: {qubole_command_id}' \
234+
'\nQubole Command Results:' \
235+
'\n{qubole_command_results}'.format(
236+
qubole_command_id=qubole_command_id,
237+
qubole_command_results=qubole_command_results)
237238
raise AirflowException(str(airflow_exception) + exception_message)
238239
raise AirflowException(str(airflow_exception))

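Rewrapping the message also lets the `# noqa: E122` ("continuation line missing indentation or outdented") suppression go: once the `.format()` arguments sit one indentation level deeper than the line that opens the call, there is nothing left to silence. A toy reproduction of that warning, with made-up values in place of the Qubole fields:

# Outdented continuation lines: pycodestyle reports E122 on the argument lines.
message = 'Command Id: {command_id}\nResults: {results}'.format(
command_id=42,
results='ok')

# Indenting the continuation lines clears the warning.
message = 'Command Id: {command_id}\nResults: {results}'.format(
    command_id=42,
    results='ok')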
β€Žairflow/secrets/local_filesystem.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ def _parse_secret_file(file_path: str) -> Dict[str, Any]:
139139

140140
if parse_errors:
141141
raise AirflowFileParseException(
142-
f"Failed to load the secret file.", file_path=file_path, parse_errors=parse_errors
142+
"Failed to load the secret file.", file_path=file_path, parse_errors=parse_errors
143143
)
144144

145145
return secrets

β€Žairflow/www/views.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1673,11 +1673,11 @@ def duration(self, session=None):
16731673
TF = TaskFail
16741674
ti_fails = (
16751675
session.query(TF)
1676-
.filter(TF.dag_id == dag.dag_id,
1677-
TF.execution_date >= min_date,
1678-
TF.execution_date <= base_date,
1679-
TF.task_id.in_([t.task_id for t in dag.tasks]))
1680-
.all() # noqa
1676+
.filter(TF.dag_id == dag.dag_id,
1677+
TF.execution_date >= min_date,
1678+
TF.execution_date <= base_date,
1679+
TF.task_id.in_([t.task_id for t in dag.tasks]))
1680+
.all()
16811681
)
16821682

16831683
fails_totals = defaultdict(int)

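The only non-whitespace change in this hunk is dropping a bare `# noqa` from the `.all()` call. A bare `# noqa` hides every finding flake8 could report on its line, whereas the `# noqa: <code>` form used elsewhere in this commit hides a single named rule, so bare suppressions are usually removed as soon as they stop being needed. A deliberately lint-dirty illustration:

# Bare form: silences everything flake8 might report on this line.
import os, sys  # noqa

# Scoped form: silences only E401 (multiple imports on one line); the unused
# imports would still be reported as F401.
import json, csv  # noqa: E401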
β€Žbackport_packages/setup_backport_packages.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -416,7 +416,7 @@ def usage():
416416
for package in packages:
417417
out += f"{package} "
418418
out_array = textwrap.wrap(out, 80)
419-
print(f"Available packages: ")
419+
print("Available packages: ")
420420
print()
421421
for text in out_array:
422422
print(text)

β€Ždev/send_email.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,8 @@
3838
SMTP_PORT = 587
3939
SMTP_SERVER = "mail-relay.apache.org"
4040
MAILING_LIST = {
41-
"dev": f"dev@airflow.apache.org",
42-
"users": f"users@airflow.apache.org"
41+
"dev": "dev@airflow.apache.org",
42+
"users": "users@airflow.apache.org"
4343
}
4444

4545
