Skip to content

Commit dd2095f

Browse files
authored
Simplify string expressions & Use f-string (#12216)
* Simplify string expressions & Use f-string. This is a follow-up clean-up for the minor issues introduced in the process of adopting Black. * Fixup
1 parent 0443f04 commit dd2095f

File tree

17 files changed

+33
-37
lines changed

17 files changed

+33
-37
lines changed

airflow/cli/cli_parser.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -363,7 +363,7 @@ def positive_int(value):
363363
# instead.
364364
ARG_INTERACTIVE = Arg(
365365
('-N', '--interactive'),
366-
help='Do not capture standard output and error streams ' '(useful for interactive debugging)',
366+
help='Do not capture standard output and error streams (useful for interactive debugging)',
367367
action='store_true',
368368
)
369369
ARG_FORCE = Arg(
@@ -573,15 +573,15 @@ def positive_int(value):
573573
ARG_LASTNAME = Arg(('-l', '--lastname'), help='Last name of the user', required=True, type=str)
574574
ARG_ROLE = Arg(
575575
('-r', '--role'),
576-
help='Role of the user. Existing roles include Admin, ' 'User, Op, Viewer, and Public',
576+
help='Role of the user. Existing roles include Admin, User, Op, Viewer, and Public',
577577
required=True,
578578
type=str,
579579
)
580580
ARG_EMAIL = Arg(('-e', '--email'), help='Email of the user', required=True, type=str)
581581
ARG_EMAIL_OPTIONAL = Arg(('-e', '--email'), help='Email of the user', type=str)
582582
ARG_PASSWORD = Arg(
583583
('-p', '--password'),
584-
help='Password of the user, required to create a user ' 'without --use-random-password',
584+
help='Password of the user, required to create a user without --use-random-password',
585585
type=str,
586586
)
587587
ARG_USE_RANDOM_PASSWORD = Arg(
@@ -628,7 +628,7 @@ def positive_int(value):
628628
# info
629629
ARG_ANONYMIZE = Arg(
630630
('--anonymize',),
631-
help=('Minimize any personal identifiable information. ' 'Use it when sharing output with others.'),
631+
help='Minimize any personal identifiable information. Use it when sharing output with others.',
632632
action='store_true',
633633
)
634634
ARG_FILE_IO = Arg(

airflow/cli/commands/user_command.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ def users_manage_role(args, remove=False):
9292
raise SystemExit('Missing args: must supply one of --username or --email')
9393

9494
if args.username and args.email:
95-
raise SystemExit('Conflicting args: must supply either --username' ' or --email, but not both')
95+
raise SystemExit('Conflicting args: must supply either --username or --email, but not both')
9696

9797
appbuilder = cached_app().appbuilder # pylint: disable=no-member
9898
user = appbuilder.sm.find_user(username=args.username) or appbuilder.sm.find_user(email=args.email)

airflow/executors/kubernetes_executor.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ def run(self) -> None:
112112
raise
113113
else:
114114
self.log.warning(
115-
'Watch died gracefully, starting back up with: ' 'last resource_version: %s',
115+
'Watch died gracefully, starting back up with: last resource_version: %s',
116116
self.resource_version,
117117
)
118118

@@ -263,7 +263,7 @@ def _health_check_kube_watcher(self):
263263
self.log.debug("KubeJobWatcher alive, continuing")
264264
else:
265265
self.log.error(
266-
'Error while health checking kube watcher process. ' 'Process died for unknown reasons'
266+
'Error while health checking kube watcher process. Process died for unknown reasons'
267267
)
268268
self.kube_watcher = self._make_kube_watcher()
269269

@@ -478,7 +478,7 @@ def clear_not_launched_queued_tasks(self, session=None) -> None:
478478
pod_list = self.kube_client.list_namespaced_pod(self.kube_config.kube_namespace, **kwargs)
479479
if not pod_list.items:
480480
self.log.info(
481-
'TaskInstance: %s found in queued state but was not launched, ' 'rescheduling', task
481+
'TaskInstance: %s found in queued state but was not launched, rescheduling', task
482482
)
483483
session.query(TaskInstance).filter(
484484
TaskInstance.dag_id == task.dag_id,
@@ -509,7 +509,7 @@ def _create_or_update_secret(secret_name, secret_path):
509509
**self.kube_config.kube_client_request_args,
510510
)
511511
self.log.exception(
512-
'Exception while trying to inject secret. ' 'Secret name: %s, error details: %s',
512+
'Exception while trying to inject secret. Secret name: %s, error details: %s',
513513
secret_name,
514514
e,
515515
)
@@ -605,7 +605,7 @@ def sync(self) -> None:
605605
self.change_state(key, State.FAILED, e)
606606
else:
607607
self.log.warning(
608-
'ApiException when attempting to run task, re-queueing. ' 'Message: %s',
608+
'ApiException when attempting to run task, re-queueing. Message: %s',
609609
json.loads(e.body)['message'],
610610
)
611611
self.task_queue.put(task)
@@ -729,7 +729,7 @@ def _flush_result_queue(self) -> None:
729729
self._change_state(key, state, pod_id, namespace)
730730
except Exception as e: # pylint: disable=broad-except
731731
self.log.exception(
732-
'Ignoring exception: %s when attempting to change state of %s ' 'to %s.',
732+
'Ignoring exception: %s when attempting to change state of %s to %s.',
733733
e,
734734
results,
735735
state,

airflow/models/baseoperator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1194,7 +1194,7 @@ def _set_relatives(
11941194

11951195
if len(dags) > 1:
11961196
raise AirflowException(
1197-
'Tried to set relationships between tasks in ' 'more than one DAG: {}'.format(dags.values())
1197+
f'Tried to set relationships between tasks in more than one DAG: {dags.values()}'
11981198
)
11991199
elif len(dags) == 1:
12001200
dag = dags.popitem()[1]

airflow/models/dagrun.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ def __init__(
135135

136136
def __repr__(self):
137137
return (
138-
'<DagRun {dag_id} @ {execution_date}: {run_id}, ' 'externally triggered: {external_trigger}>'
138+
'<DagRun {dag_id} @ {execution_date}: {run_id}, externally triggered: {external_trigger}>'
139139
).format(
140140
dag_id=self.dag_id,
141141
execution_date=self.execution_date,

airflow/providers/apache/druid/hooks/druid.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None:
9393
self.log.info("Druid ingestion spec: %s", json_index_spec)
9494
req_index = requests.post(url, data=json_index_spec, headers=self.header, auth=self.get_auth())
9595
if req_index.status_code != 200:
96-
raise AirflowException('Did not get 200 when ' 'submitting the Druid job to {}'.format(url))
96+
raise AirflowException(f'Did not get 200 when submitting the Druid job to {url}')
9797

9898
req_json = req_index.json()
9999
# Wait until the job is completed

airflow/providers/google/cloud/hooks/bigquery.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2067,7 +2067,7 @@ def run_query(
20672067
_validate_value("api_resource_configs['query']", configuration['query'], dict)
20682068

20692069
if sql is None and not configuration['query'].get('query', None):
2070-
raise TypeError('`BigQueryBaseCursor.run_query` ' 'missing 1 required positional argument: `sql`')
2070+
raise TypeError('`BigQueryBaseCursor.run_query` missing 1 required positional argument: `sql`')
20712071

20722072
# BigQuery also allows you to define how you want a table's schema to change
20732073
# as a side effect of a query job
@@ -2801,9 +2801,7 @@ def _split_tablename(
28012801
) -> Tuple[str, str, str]:
28022802

28032803
if '.' not in table_input:
2804-
raise ValueError(
2805-
'Expected target table name in the format of ' '<dataset>.<table>. Got: {}'.format(table_input)
2806-
)
2804+
raise ValueError(f'Expected table name in the format of <dataset>.<table>. Got: {table_input}')
28072805

28082806
if not default_project_id:
28092807
raise ValueError("INTERNAL: No default project is specified")

airflow/providers/google/cloud/hooks/gcs.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -630,7 +630,7 @@ def get_crc32c(self, bucket_name: str, object_name: str):
630630
:type object_name: str
631631
"""
632632
self.log.info(
633-
'Retrieving the crc32c checksum of ' 'object_name: %s in bucket_name: %s',
633+
'Retrieving the crc32c checksum of object_name: %s in bucket_name: %s',
634634
object_name,
635635
bucket_name,
636636
)
@@ -651,7 +651,7 @@ def get_md5hash(self, bucket_name: str, object_name: str) -> str:
651651
storage bucket_name.
652652
:type object_name: str
653653
"""
654-
self.log.info('Retrieving the MD5 hash of ' 'object: %s in bucket: %s', object_name, bucket_name)
654+
self.log.info('Retrieving the MD5 hash of object: %s in bucket: %s', object_name, bucket_name)
655655
client = self.get_conn()
656656
bucket = client.bucket(bucket_name)
657657
blob = bucket.get_blob(blob_name=object_name)

airflow/providers/google/cloud/hooks/mlengine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ def create_job(self, job: dict, project_id: str, use_existing_job_fn: Optional[C
140140
existing_job = self._get_job(project_id, job_id)
141141
if not use_existing_job_fn(existing_job):
142142
self.log.error(
143-
'Job with job_id %s already exist, but it does ' 'not match our expectation: %s',
143+
'Job with job_id %s already exist, but it does not match our expectation: %s',
144144
job_id,
145145
existing_job,
146146
)

airflow/providers/microsoft/azure/hooks/azure_batch.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def configure_pool( # pylint: disable=too-many-arguments
148148

149149
elif os_family:
150150
self.log.info(
151-
'Using cloud service configuration to create pool, ' 'virtual machine configuration ignored'
151+
'Using cloud service configuration to create pool, virtual machine configuration ignored'
152152
)
153153
pool = batch_models.PoolAddParameter(
154154
id=pool_id,

0 commit comments

Comments (0)