Skip to content

Commit 1c1ef7e

Browse files
authored
Add project_id to client inside BigQuery hook update_table method (#13018)
1 parent 4d3300c commit 1c1ef7e

File tree

2 files changed

+53
-1
lines changed

2 files changed

+53
-1
lines changed

airflow/providers/google/cloud/hooks/bigquery.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -714,7 +714,7 @@ def update_table(
714714

715715
table = Table.from_api_repr(table_resource)
716716
self.log.info('Updating table: %s', table_resource["tableReference"])
717-
table_object = self.get_client().update_table(table=table, fields=fields)
717+
table_object = self.get_client(project_id=project_id).update_table(table=table, fields=fields)
718718
self.log.info('Table %s.%s.%s updated successfully', project_id, dataset_id, table_id)
719719
return table_object.to_api_repr()
720720

tests/providers/google/cloud/hooks/test_bigquery.py

Lines changed: 52 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1609,6 +1609,58 @@ def test_create_external_table_with_kms(self, mock_create):
16091609
exists_ok=True,
16101610
)
16111611

1612+
@mock.patch("airflow.providers.google.cloud.hooks.bigquery.Table")
@mock.patch("airflow.providers.google.cloud.hooks.bigquery.Client")
def test_update_table(self, mock_client, mock_table):
    """``update_table`` must convert the resource via ``Table.from_api_repr``
    and forward the resulting object, together with the field list, to the
    BigQuery client obtained for the given project."""
    # Full table resource covering every patchable property the hook supports.
    table_resource = {
        "tableReference": {
            "projectId": PROJECT_ID,
            "datasetId": DATASET_ID,
            "tableId": TABLE_ID,
        },
        "description": 'Test description.',
        "expirationTime": 2524608000000,
        "friendlyName": 'Test friendly name.',
        "labels": {'label1': 'test1', 'label2': 'test2'},
        "schema": {
            "fields": [
                {'name': 'id', 'type': 'STRING', 'mode': 'REQUIRED'},
                {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'},
                {'name': 'balance', 'type': 'FLOAT', 'mode': 'NULLABLE'},
                {'name': 'new_field', 'type': 'STRING', 'mode': 'NULLABLE'},
            ]
        },
        "timePartitioning": {'expirationMs': 10000000},
        "view": {
            'query': "SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 500",
            'useLegacySql': False,
        },
        "requirePartitionFilter": True,
    }
    # Request an update of every top-level key in the resource.
    updated_fields = list(table_resource)

    self.hook.update_table(
        table_resource=table_resource,
        fields=updated_fields,
        dataset_id=DATASET_ID,
        table_id=TABLE_ID,
        project_id=PROJECT_ID,
    )

    # The raw resource dict must reach Table.from_api_repr unchanged ...
    mock_table.from_api_repr.assert_called_once_with(table_resource)
    # ... and the constructed Table object must be passed to the client
    # along with the same field list.
    mock_client.return_value.update_table.assert_called_once_with(
        table=mock_table.from_api_repr.return_value, fields=updated_fields
    )
1663+
16121664
@mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_job")
16131665
def test_run_query_with_kms(self, mock_insert):
16141666
encryption_configuration = {"kms_key_name": "projects/p/locations/l/keyRings/k/cryptoKeys/c"}

0 commit comments

Comments (0)