Skip to content

Commit cf9437d

Browse files
authored
Simplify string expressions (#12123)
Black has trouble formatting strings that are too long and produces unusual string expressions.
1 parent f097ae3 commit cf9437d

File tree

15 files changed

+36
-56
lines changed

15 files changed

+36
-56
lines changed

β€Žairflow/cli/cli_parser.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -304,9 +304,7 @@ def positive_int(value):
304304
)
305305
ARG_SAVE_DAGRUN = Arg(
306306
("--save-dagrun",),
307-
help=(
308-
"After completing the backfill, saves the diagram for current DAG Run to the indicated file.\n" "\n"
309-
),
307+
help=("After completing the backfill, saves the diagram for current DAG Run to the indicated file.\n\n"),
310308
)
311309

312310
# list_tasks
@@ -453,12 +451,12 @@ def positive_int(value):
453451
ARG_ACCESS_LOGFILE = Arg(
454452
("-A", "--access-logfile"),
455453
default=conf.get('webserver', 'ACCESS_LOGFILE'),
456-
help="The logfile to store the webserver access log. Use '-' to print to " "stderr",
454+
help="The logfile to store the webserver access log. Use '-' to print to stderr",
457455
)
458456
ARG_ERROR_LOGFILE = Arg(
459457
("-E", "--error-logfile"),
460458
default=conf.get('webserver', 'ERROR_LOGFILE'),
461-
help="The logfile to store the webserver error log. Use '-' to print to " "stderr",
459+
help="The logfile to store the webserver error log. Use '-' to print to stderr",
462460
)
463461

464462
# scheduler
@@ -493,7 +491,7 @@ def positive_int(value):
493491
)
494492
ARG_CELERY_HOSTNAME = Arg(
495493
("-H", "--celery-hostname"),
496-
help=("Set the hostname of celery worker " "if you have multiple workers on a single machine"),
494+
help=("Set the hostname of celery worker if you have multiple workers on a single machine"),
497495
)
498496
ARG_UMASK = Arg(
499497
("-u", "--umask"),

β€Žairflow/cli/commands/dag_command.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -158,9 +158,7 @@ def dag_delete(args):
158158
api_client = get_current_api_client()
159159
if (
160160
args.yes
161-
or input(
162-
"This will drop all existing records related to the specified DAG. " "Proceed? (y/n)"
163-
).upper()
161+
or input("This will drop all existing records related to the specified DAG. Proceed? (y/n)").upper()
164162
== "Y"
165163
):
166164
try:

β€Žairflow/cli/commands/db_command.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def initdb(args):
3535
def resetdb(args):
3636
"""Resets the metadata database"""
3737
print("DB: " + repr(settings.engine.url))
38-
if args.yes or input("This will drop existing tables " "if they exist. Proceed? " "(y/n)").upper() == "Y":
38+
if args.yes or input("This will drop existing tables if they exist. Proceed? (y/n)").upper() == "Y":
3939
db.resetdb()
4040
else:
4141
print("Cancelled")

β€Žairflow/cli/commands/user_command.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ def _import_users(users_list): # pylint: disable=redefined-outer-name
189189
required_fields = ['username', 'firstname', 'lastname', 'email', 'roles']
190190
for field in required_fields:
191191
if not user.get(field):
192-
print("Error: '{}' is a required field, but was not " "specified".format(field))
192+
print(f"Error: '{field}' is a required field, but was not specified")
193193
sys.exit(1)
194194

195195
existing_user = appbuilder.sm.find_user(email=user['email'])

β€Žairflow/cli/commands/webserver_command.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,7 @@ def _check_workers(self) -> None:
250250
# to increase number of workers
251251
if num_workers_running < self.num_workers_expected:
252252
self.log.error(
253-
"[%d / %d] Some workers seem to have died and gunicorn did not restart " "them as expected",
253+
"[%d / %d] Some workers seem to have died and gunicorn did not restart them as expected",
254254
num_ready_workers_running,
255255
num_workers_running,
256256
)

β€Žairflow/providers/amazon/aws/hooks/sagemaker.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ def check_s3_url(self, s3url: str) -> bool:
215215
# or if s3 prefix exists in the case user provides multiple files in
216216
# a prefix
217217
raise AirflowException(
218-
"The input S3 Key " "or Prefix {} does not exist in the Bucket {}".format(s3url, bucket)
218+
f"The input S3 Key or Prefix {s3url} does not exist in the Bucket {bucket}"
219219
)
220220
return True
221221

β€Žairflow/providers/google/cloud/hooks/bigquery.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2139,7 +2139,7 @@ def run_query(
21392139
_validate_value(param_name, configuration['query'][param_name], param_type)
21402140

21412141
if param_name == 'schemaUpdateOptions' and param:
2142-
self.log.info("Adding experimental 'schemaUpdateOptions': " "%s", schema_update_options)
2142+
self.log.info("Adding experimental 'schemaUpdateOptions': %s", schema_update_options)
21432143

21442144
if param_name != 'destinationTable':
21452145
continue
@@ -2167,7 +2167,7 @@ def run_query(
21672167
and configuration['query']['useLegacySql']
21682168
and 'queryParameters' in configuration['query']
21692169
):
2170-
raise ValueError("Query parameters are not allowed " "when using legacy SQL")
2170+
raise ValueError("Query parameters are not allowed when using legacy SQL")
21712171

21722172
if labels:
21732173
_api_resource_configs_duplication_check('labels', labels, configuration)

β€Žairflow/providers/google/cloud/hooks/cloud_sql.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -646,10 +646,10 @@ def get_socket_path(self) -> str:
646646
"mysql": {
647647
"proxy": {
648648
"tcp": "mysql://{user}:{password}@127.0.0.1:{proxy_port}/{database}",
649-
"socket": "mysql://{user}:{password}@localhost/{database}?" "unix_socket={socket_path}",
649+
"socket": "mysql://{user}:{password}@localhost/{database}?unix_socket={socket_path}",
650650
},
651651
"public": {
652-
"ssl": "mysql://{user}:{password}@{public_ip}:{public_port}/{database}?" "ssl={ssl_spec}",
652+
"ssl": "mysql://{user}:{password}@{public_ip}:{public_port}/{database}?ssl={ssl_spec}",
653653
"non-ssl": "mysql://{user}:{password}@{public_ip}:{public_port}/{database}",
654654
},
655655
},

β€Žairflow/providers/google/cloud/hooks/gcs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -383,7 +383,7 @@ def upload(
383383
blob.upload_from_string(data, content_type=mime_type)
384384
self.log.info('Data stream uploaded to %s in %s bucket', object_name, bucket_name)
385385
else:
386-
raise ValueError("'filename' and 'data' parameter missing. " "One is required to upload to gcs.")
386+
raise ValueError("'filename' and 'data' parameter missing. One is required to upload to gcs.")
387387

388388
def exists(self, bucket_name: str, object_name: str) -> bool:
389389
"""

β€Žairflow/providers/google/cloud/operators/functions.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -198,9 +198,7 @@ def _update_function(self, hook) -> None:
198198
def _check_if_function_exists(self, hook) -> bool:
199199
name = self.body.get('name')
200200
if not name:
201-
raise GcpFieldValidationException(
202-
"The 'name' field should be present in " "body: '{}'.".format(self.body)
203-
)
201+
raise GcpFieldValidationException(f"The 'name' field should be present in body: '{self.body}'.")
204202
try:
205203
hook.get_function(name)
206204
except HttpError as e:

0 commit comments

Comments
 (0)