Commit ca501ef

fix: try002 for provider google (#38803)

1 parent df4ce54 commit ca501ef
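
For context: ruff's TRY002 rule ("raise vanilla class", from the tryceratops ruleset) flags code that raises the builtin Exception directly, because callers can only catch such errors with a blanket except Exception. This commit swaps those raises in the Google provider for more specific types: ValueError where the caller passed bad input, AirflowException where a job or resource is in a bad state. A minimal before/after sketch of the pattern (illustrative only, not code taken from this diff):

    # Before: flagged by TRY002; callers cannot catch this precisely.
    if main_jar and main_class:
        raise Exception("Set either main_jar or main_class")

    # After: a specific builtin type that callers can target.
    if main_jar and main_class:
        raise ValueError("Set either main_jar or main_class")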

File tree: 9 files changed, +31 -37 lines changed

airflow/providers/google/cloud/hooks/bigquery.py

Lines changed: 6 additions & 6 deletions

@@ -2295,7 +2295,7 @@ def var_print(var_name):
         return f"Format exception for {var_name}: "

     if table_input.count(".") + table_input.count(":") > 3:
-        raise Exception(f"{var_print(var_name)}Use either : or . to specify project got {table_input}")
+        raise ValueError(f"{var_print(var_name)}Use either : or . to specify project got {table_input}")
     cmpt = table_input.rsplit(":", 1)
     project_id = None
     rest = table_input
@@ -2307,7 +2307,7 @@ def var_print(var_name):
             project_id = cmpt[0]
             rest = cmpt[1]
     else:
-        raise Exception(
+        raise ValueError(
             f"{var_print(var_name)}Expect format of (<project:)<dataset>.<table>, got {table_input}"
         )

@@ -2323,7 +2323,7 @@ def var_print(var_name):
         dataset_id = cmpt[0]
         table_id = cmpt[1]
     else:
-        raise Exception(
+        raise ValueError(
             f"{var_print(var_name)} Expect format of (<project.|<project:)<dataset>.<table>, "
             f"got {table_input}"
         )
@@ -3107,7 +3107,7 @@ def var_print(var_name):
         return f"Format exception for {var_name}: "

     if table_input.count(".") + table_input.count(":") > 3:
-        raise Exception(f"{var_print(var_name)}Use either : or . to specify project got {table_input}")
+        raise ValueError(f"{var_print(var_name)}Use either : or . to specify project got {table_input}")
     cmpt = table_input.rsplit(":", 1)
     project_id = None
     rest = table_input
@@ -3119,7 +3119,7 @@ def var_print(var_name):
             project_id = cmpt[0]
             rest = cmpt[1]
     else:
-        raise Exception(
+        raise ValueError(
             f"{var_print(var_name)}Expect format of (<project:)<dataset>.<table>, got {table_input}"
         )

@@ -3135,7 +3135,7 @@ def var_print(var_name):
         dataset_id = cmpt[0]
         table_id = cmpt[1]
     else:
-        raise Exception(
+        raise ValueError(
             f"{var_print(var_name)}Expect format of (<project.|<project:)<dataset>.<table>, got {table_input}"
         )

airflow/providers/google/cloud/hooks/dataflow.py

Lines changed: 5 additions & 5 deletions

@@ -252,7 +252,7 @@ def _get_current_jobs(self) -> list[dict]:
             self._job_id = jobs[0]["id"]
             return jobs
         else:
-            raise Exception("Missing both dataflow job ID and name.")
+            raise ValueError("Missing both dataflow job ID and name.")

     def fetch_job_by_id(self, job_id: str) -> dict:
         """
@@ -410,18 +410,18 @@ def _check_dataflow_job_state(self, job) -> bool:
         else:
             terminal_states = DataflowJobStatus.TERMINAL_STATES | {DataflowJobStatus.JOB_STATE_RUNNING}
         if self._expected_terminal_state not in terminal_states:
-            raise Exception(
+            raise AirflowException(
                 f"Google Cloud Dataflow job's expected terminal state "
                 f"'{self._expected_terminal_state}' is invalid."
                 f" The value should be any of the following: {terminal_states}"
             )
         elif is_streaming and self._expected_terminal_state == DataflowJobStatus.JOB_STATE_DONE:
-            raise Exception(
+            raise AirflowException(
                 "Google Cloud Dataflow job's expected terminal state cannot be "
                 "JOB_STATE_DONE while it is a streaming job"
             )
         elif not is_streaming and self._expected_terminal_state == DataflowJobStatus.JOB_STATE_DRAINED:
-            raise Exception(
+            raise AirflowException(
                 "Google Cloud Dataflow job's expected terminal state cannot be "
                 "JOB_STATE_DRAINED while it is a batch job"
             )
@@ -435,7 +435,7 @@ def _check_dataflow_job_state(self, job) -> bool:
             return self._wait_until_finished is False

         self.log.debug("Current job: %s", job)
-        raise Exception(
+        raise AirflowException(
             f"Google Cloud Dataflow job {job['name']} is in an unexpected terminal state: {current_state}, "
             f"expected terminal state: {self._expected_terminal_state}"
         )
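
Worth noting in the dataflow changes: missing identifiers (no job ID and no name) now raise ValueError, while invalid or unexpected job states raise AirflowException, Airflow's own exception type from airflow.exceptions (assumed to be imported at the top of this module, as is usual for provider hooks; the import lines sit outside these hunks). A tiny sketch of the split, using a hypothetical helper name:

    from airflow.exceptions import AirflowException

    def check_terminal_state(expected: str | None, terminal_states: set[str]) -> None:
        # Hypothetical helper mirroring the hook's validation style.
        if expected is None:
            raise ValueError("Missing expected terminal state.")  # bad input from the caller
        if expected not in terminal_states:
            raise AirflowException(f"{expected!r} is not a valid terminal state.")  # bad job state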

airflow/providers/google/cloud/hooks/dataproc.py

Lines changed: 2 additions & 2 deletions

@@ -158,10 +158,10 @@ def set_main(self, main_jar: str | None = None, main_class: str | None = None) -

         :param main_jar: URI for the main file.
         :param main_class: Name of the main class.
-        :raises: Exception
+        :raises: ValueError
         """
         if main_class is not None and main_jar is not None:
-            raise Exception("Set either main_jar or main_class")
+            raise ValueError("Set either main_jar or main_class")
         if main_jar:
             self.job["job"][self.job_type]["main_jar_file_uri"] = main_jar
         else:

airflow/providers/google/cloud/operators/dataproc.py

Lines changed: 2 additions & 2 deletions

@@ -1430,13 +1430,13 @@ def _generate_job_template(self) -> str:
         if self.job_template:
             job = self.job_template.build()
             return job["job"]
-        raise Exception("Create a job template before")
+        raise AirflowException("Create a job template before")

     def execute(self, context: Context):
         if self.job_template:
             self.job = self.job_template.build()
         if self.job is None:
-            raise Exception("The job should be set here.")
+            raise AirflowException("The job should be set here.")
         self.dataproc_job_id = self.job["job"]["reference"]["job_id"]
         self.log.info("Submitting %s job %s", self.job_type, self.dataproc_job_id)
         job_object = self.hook.submit_job(

airflow/providers/google/leveldb/hooks/leveldb.py

Lines changed: 7 additions & 7 deletions

@@ -94,17 +94,17 @@ def run(
         """
         if command == "put":
             if not value:
-                raise Exception("Please provide `value`!")
+                raise ValueError("Please provide `value`!")
             return self.put(key, value)
         elif command == "get":
             return self.get(key)
         elif command == "delete":
             return self.delete(key)
         elif command == "write_batch":
             if not keys:
-                raise Exception("Please provide `keys`!")
+                raise ValueError("Please provide `keys`!")
             if not values:
-                raise Exception("Please provide `values`!")
+                raise ValueError("Please provide `values`!")
             return self.write_batch(keys, values)
         else:
             raise LevelDBHookException("Unknown command for LevelDB hook")
@@ -117,7 +117,7 @@ def put(self, key: bytes, value: bytes):
         :param value: value for put execution e.g. ``b'value'``, ``b'another-value'``
         """
         if not self.db:
-            raise Exception(DB_NOT_INITIALIZED_BEFORE)
+            raise AirflowException(DB_NOT_INITIALIZED_BEFORE)
         self.db.put(key, value)

     def get(self, key: bytes) -> bytes:
@@ -128,7 +128,7 @@ def get(self, key: bytes) -> bytes:
         :returns: value of key from db.get
         """
         if not self.db:
-            raise Exception(DB_NOT_INITIALIZED_BEFORE)
+            raise AirflowException(DB_NOT_INITIALIZED_BEFORE)
         return self.db.get(key)

     def delete(self, key: bytes):
@@ -138,7 +138,7 @@ def delete(self, key: bytes):
         :param key: key for delete execution, e.g. ``b'key'``, ``b'another-key'``
         """
         if not self.db:
-            raise Exception(DB_NOT_INITIALIZED_BEFORE)
+            raise AirflowException(DB_NOT_INITIALIZED_BEFORE)
         self.db.delete(key)

     def write_batch(self, keys: list[bytes], values: list[bytes]):
@@ -149,7 +149,7 @@ def write_batch(self, keys: list[bytes], values: list[bytes]):
         :param values: values for write_batch execution e.g. ``[b'value', b'another-value']``
         """
         if not self.db:
-            raise Exception(DB_NOT_INITIALIZED_BEFORE)
+            raise AirflowException(DB_NOT_INITIALIZED_BEFORE)
         with self.db.write_batch() as batch:
             for i, key in enumerate(keys):
                 batch.put(key, values[i])

pyproject.toml

Lines changed: 0 additions & 6 deletions

@@ -381,12 +381,6 @@ combine-as-imports = true
 # All the providers modules which do not follow TRY002 yet
 # cncf.kubernetes
 "airflow/providers/cncf/kubernetes/operators/pod.py" = ["TRY002"]
-# google
-"airflow/providers/google/cloud/hooks/bigquery.py" = ["TRY002"]
-"airflow/providers/google/cloud/hooks/dataflow.py" = ["TRY002"]
-"airflow/providers/google/cloud/hooks/dataproc.py" = ["TRY002"]
-"airflow/providers/google/cloud/operators/dataproc.py" = ["TRY002"]
-"airflow/providers/google/leveldb/hooks/leveldb.py" = ["TRY002"]
 # imap
 "airflow/providers/imap/hooks/imap.py" = ["TRY002"]
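
With the per-file ignores above deleted, ruff now enforces TRY002 on these five Google provider files like everything else; only the entries that remain (cncf.kubernetes, imap, and so on) stay exempt. Assuming a standard checkout, a run along the lines of "ruff check --select TRY002 airflow/providers/google" should come back clean after this commit.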

tests/providers/google/cloud/hooks/test_bigquery.py

Lines changed: 5 additions & 5 deletions

@@ -155,7 +155,7 @@ def test_get_pandas_df(self, mock_read_gbq):
     @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_service")
     def test_invalid_schema_update_options(self, mock_get_service):
         with pytest.raises(
-            Exception,
+            ValueError,
             match=(
                 r"\['THIS IS NOT VALID'\] contains invalid schema update options. "
                 r"Please only use one or more of the following options: "
@@ -172,7 +172,7 @@ def test_invalid_schema_update_options(self, mock_get_service):
     @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_service")
     def test_invalid_schema_update_and_write_disposition(self, mock_get_service):
         with pytest.raises(
-            Exception,
+            ValueError,
             match="schema_update_options is only allowed if"
             " write_disposition is 'WRITE_APPEND' or 'WRITE_TRUNCATE'.",
         ):
@@ -846,7 +846,7 @@ def test_update_table_schema_without_policy_tags(self, mock_update, mock_get_sch
     @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_service")
     def test_invalid_source_format(self, mock_get_service):
         with pytest.raises(
-            Exception,
+            ValueError,
             match=r"JSON is not a valid source format. Please use one of the following types: \['CSV', "
             r"'NEWLINE_DELIMITED_JSON', 'AVRO', 'GOOGLE_SHEETS', 'DATASTORE_BACKUP', 'PARQUET'\]",
         ):
@@ -959,7 +959,7 @@ def test_job_id_validity(self, mock_md5, test_dag_id, expected_job_id):

 class TestBigQueryTableSplitter:
     def test_internal_need_default_project(self):
-        with pytest.raises(Exception, match="INTERNAL: No default project is specified"):
+        with pytest.raises(ValueError, match="INTERNAL: No default project is specified"):
             split_tablename("dataset.table", None)

     @pytest.mark.parametrize(
@@ -1008,7 +1008,7 @@ def test_split_tablename(self, project_expected, dataset_expected, table_expecte
     )
     def test_invalid_syntax(self, table_input, var_name, exception_message):
         default_project_id = "project"
-        with pytest.raises(Exception, match=exception_message.format(table_input)):
+        with pytest.raises(ValueError, match=exception_message.format(table_input)):
             split_tablename(table_input, default_project_id, var_name)

tests/providers/google/cloud/hooks/test_dataflow.py

Lines changed: 3 additions & 3 deletions

@@ -1284,7 +1284,7 @@ def test_dataflow_job_wait_for_multiple_jobs_and_one_in_terminal_state(self, sta
             num_retries=20,
             multiple_jobs=True,
         )
-        with pytest.raises(Exception, match=exception_regex):
+        with pytest.raises(AirflowException, match=exception_regex):
             dataflow_job.wait_for_done()

     def test_dataflow_job_wait_for_multiple_jobs_and_streaming_jobs(self):
@@ -1517,7 +1517,7 @@ def test_check_dataflow_job_state_terminal_state(self, job_type, job_state, exce
             num_retries=20,
             multiple_jobs=True,
         )
-        with pytest.raises(Exception, match=exception_regex):
+        with pytest.raises(AirflowException, match=exception_regex):
             dataflow_job._check_dataflow_job_state(job)

     @pytest.mark.parametrize(
@@ -1558,7 +1558,7 @@ def test_check_dataflow_job_state__invalid_expected_state(self, job_type, expect
             multiple_jobs=False,
             expected_terminal_state=expected_terminal_state,
         )
-        with pytest.raises(Exception, match=match):
+        with pytest.raises(AirflowException, match=match):
             dataflow_job._check_dataflow_job_state(job)

     def test_dataflow_job_cancel_job(self):

tests/providers/google/cloud/hooks/test_dataproc.py

Lines changed: 1 addition & 1 deletion

@@ -1084,7 +1084,7 @@ def test_add_python_file_uris(self):
         assert python_file_uris == self.builder.job["job"][self.job_type]["python_file_uris"]

     def test_set_main_error(self):
-        with pytest.raises(Exception):
+        with pytest.raises(ValueError):
             self.builder.set_main("test", "test")

     def test_set_main_class(self):
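
A note on why the test updates are more than cosmetic: pytest.raises(Exception) passes for any subclass of Exception, so the old assertions could not detect a change in exception type. Pinning the concrete class makes each test fail if the code regresses to raising something else. A self-contained illustration (hypothetical function, not taken from this diff):

    import pytest

    def set_main(main_jar: str | None, main_class: str | None) -> None:
        # Mirrors the dataproc builder's validation rule.
        if main_jar is not None and main_class is not None:
            raise ValueError("Set either main_jar or main_class")

    def test_set_main_error():
        # pytest.raises(Exception) would also pass here; ValueError is stricter.
        with pytest.raises(ValueError, match="main_jar or main_class"):
            set_main("job.jar", "com.example.Main")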
