
Commit e25eea0

Inclusive Language (#18349)
1 parent e81f14b commit e25eea0

23 files changed: +34 additions, -34 deletions

COMMITTERS.rst

Lines changed: 1 addition & 1 deletion

@@ -158,7 +158,7 @@ can become the Mentor and guide the proposed candidates on how they can become a
 
 If the committee does not have enough information, requires more time, or requires more evidence of
 candidate's eligibility, a mentor, who is not the proposer, is selected to help mentor the candidate
-The mentor should try to remain impartial -- his/her goal is to provide the missing evidence and to
+The mentor should try to remain impartial -- their goal is to provide the missing evidence and to
 try to coach/mentor the candidate to success.
 
 In order to re-raise a candidate vote, both Proposer and Mentor must be in favor. Again,

airflow/models/serialized_dag.py

Lines changed: 1 addition & 1 deletion

@@ -160,7 +160,7 @@ def read_all_dags(cls, session: Session = None) -> Dict[str, 'SerializedDAG']:
             log.debug("Deserializing DAG: %s", row.dag_id)
             dag = row.dag
 
-            # Sanity check.
+            # Coherence check
             if dag.dag_id == row.dag_id:
                 dags[row.dag_id] = dag
             else:
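
For context, the renamed comment guards the deserialization loop in read_all_dags: a row is kept only when the dag_id stored inside the serialized payload agrees with the row's own dag_id column. A minimal sketch of that pattern (the row objects and warning text here are illustrative, not the exact Airflow code):

import logging
from typing import Dict, Iterable

log = logging.getLogger(__name__)

def collect_dags(rows: Iterable) -> Dict[str, object]:
    """Keep only rows whose deserialized payload agrees with the row key."""
    dags = {}
    for row in rows:
        log.debug("Deserializing DAG: %s", row.dag_id)
        dag = row.dag  # deserialized DAG object

        # Coherence check: the dag_id inside the payload must match the row's column.
        if dag.dag_id == row.dag_id:
            dags[row.dag_id] = dag
        else:
            log.warning("dag_id mismatch for row %r (payload says %r); skipping", row.dag_id, dag.dag_id)
    return dags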

airflow/providers/amazon/aws/example_dags/example_emr_job_flow_automatic_steps.py

Lines changed: 1 addition & 1 deletion

@@ -44,7 +44,7 @@
     'Instances': {
         'InstanceGroups': [
             {
-                'Name': 'Master node',
+                'Name': 'Primary node',
                 'Market': 'SPOT',
                 'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.medium',

airflow/providers/amazon/aws/example_dags/example_emr_job_flow_manual_steps.py

Lines changed: 1 addition & 1 deletion

@@ -48,7 +48,7 @@
     'Instances': {
         'InstanceGroups': [
             {
-                'Name': 'Master node',
+                'Name': 'Primary node',
                 'Market': 'SPOT',
                 'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.medium',
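
In both EMR examples only the free-form 'Name' label changes; 'InstanceRole': 'MASTER' is a fixed enum value in the EMR RunJobFlow API and cannot be renamed. A hedged sketch of how such an overrides dict is typically wired up (import path as of the provider version current at this commit; all values illustrative):

from airflow.providers.amazon.aws.operators.emr_create_job_flow import EmrCreateJobFlowOperator

JOB_FLOW_OVERRIDES = {
    'Name': 'PiCalc',  # illustrative job flow name
    'Instances': {
        'InstanceGroups': [
            {
                'Name': 'Primary node',    # display label only, free-form text
                'Market': 'SPOT',
                'InstanceRole': 'MASTER',  # EMR API enum, unchanged by this commit
                'InstanceType': 'm1.medium',
                'InstanceCount': 1,
            },
        ],
        'KeepJobFlowAliveWhenNoSteps': False,
    },
}

job_flow_creator = EmrCreateJobFlowOperator(
    task_id='create_job_flow',
    job_flow_overrides=JOB_FLOW_OVERRIDES,
    aws_conn_id='aws_default',
    emr_conn_id='emr_default',
)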

airflow/providers/google/cloud/hooks/cloud_memorystore.py

Lines changed: 1 addition & 1 deletion

@@ -291,7 +291,7 @@ def failover_instance(
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
     ):
         """
-        Initiates a failover of the master node to current replica node for a specific STANDARD tier Cloud
+        Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
         Memorystore for Redis instance.
 
         :param location: The location of the Cloud Memorystore instance (for example europe-west1)
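
A hedged sketch of calling this hook method directly (connection id, project, and instance names are illustrative; data_protection_mode uses the google-cloud-redis client's FailoverInstanceRequest enum):

from google.cloud.redis_v1 import FailoverInstanceRequest

from airflow.providers.google.cloud.hooks.cloud_memorystore import CloudMemorystoreHook

hook = CloudMemorystoreHook(gcp_conn_id="google_cloud_default")
# Promote the replica of a STANDARD tier instance, tolerating limited data loss.
hook.failover_instance(
    location="europe-west1",
    instance="my-redis-instance",
    data_protection_mode=FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
    project_id="my-project",
)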

airflow/providers/google/cloud/operators/cloud_memorystore.py

Lines changed: 1 addition & 1 deletion

@@ -315,7 +315,7 @@ def execute(self, context: dict) -> None:
 
 class CloudMemorystoreFailoverInstanceOperator(BaseOperator):
     """
-    Initiates a failover of the master node to current replica node for a specific STANDARD tier Cloud
+    Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
     Memorystore for Redis instance.
 
     .. seealso::
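
The same failover expressed as a DAG task (a sketch; all values illustrative):

from google.cloud.redis_v1 import FailoverInstanceRequest

from airflow.providers.google.cloud.operators.cloud_memorystore import (
    CloudMemorystoreFailoverInstanceOperator,
)

failover_instance = CloudMemorystoreFailoverInstanceOperator(
    task_id="failover-instance",
    location="europe-west1",
    instance="my-redis-instance",
    data_protection_mode=FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
    project_id="my-project",
)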

airflow/providers/google/cloud/operators/dataproc.py

Lines changed: 3 additions & 3 deletions

@@ -135,14 +135,14 @@ class ClusterGenerator:
     :type optional_components: list[str]
     :param num_masters: The # of master nodes to spin up
     :type num_masters: int
-    :param master_machine_type: Compute engine machine type to use for the master node
+    :param master_machine_type: Compute engine machine type to use for the primary node
     :type master_machine_type: str
-    :param master_disk_type: Type of the boot disk for the master node
+    :param master_disk_type: Type of the boot disk for the primary node
         (default is ``pd-standard``).
         Valid values: ``pd-ssd`` (Persistent Disk Solid State Drive) or
        ``pd-standard`` (Persistent Disk Hard Disk Drive).
     :type master_disk_type: str
-    :param master_disk_size: Disk size for the master node
+    :param master_disk_size: Disk size for the primary node
     :type master_disk_size: int
     :param worker_machine_type: Compute engine machine type to use for the worker nodes
     :type worker_machine_type: str
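
Note that the rename here is docstring-only: the keyword arguments keep their master_* names because they are public API. A hedged usage sketch (values illustrative):

from airflow.providers.google.cloud.operators.dataproc import ClusterGenerator

cluster_config = ClusterGenerator(
    project_id="my-project",
    zone="europe-west1-b",
    num_masters=1,
    master_machine_type="n1-standard-4",  # machine type for the primary node
    master_disk_type="pd-standard",
    master_disk_size=1024,
    num_workers=2,
    worker_machine_type="n1-standard-4",
).make()  # returns the cluster config dict consumed by DataprocCreateClusterOperator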

airflow/providers/yandex/operators/yandexcloud_dataproc.py

Lines changed: 1 addition & 1 deletion

@@ -49,7 +49,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         Service account can be created inside the folder.
     :type service_account_id: Optional[str]
     :param masternode_resource_preset: Resources preset (CPU+RAM configuration)
-        for the master node of the cluster.
+        for the primary node of the cluster.
     :type masternode_resource_preset: str
     :param masternode_disk_size: Masternode storage size in GiB.
     :type masternode_disk_size: int
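
Here too only the docstring changes; the masternode_* parameter names stay. A hedged sketch (zone, bucket, and preset are illustrative; remaining arguments are assumed to fall back to the Yandex.Cloud connection defaults):

from airflow.providers.yandex.operators.yandexcloud_dataproc import DataprocCreateClusterOperator

create_cluster = DataprocCreateClusterOperator(
    task_id="create_cluster",
    zone="ru-central1-c",
    s3_bucket="my-dataproc-logs",
    masternode_resource_preset="s2.small",  # CPU+RAM preset for the primary node
    masternode_disk_size=20,                # GiB
)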

airflow/providers_manager.py

Lines changed: 2 additions & 2 deletions

@@ -107,7 +107,7 @@ def _create_customized_form_field_behaviours_schema_validator():
 
 def _sanity_check(provider_package: str, class_name: str) -> bool:
     """
-    Performs sanity check on provider classes.
+    Performs coherence check on provider classes.
     For apache-airflow providers - it checks if it starts with appropriate package. For all providers
     it tries to import the provider - checking that there are no exceptions during importing.
     It logs appropriate warning in case it detects any problems.
@@ -121,7 +121,7 @@ def _sanity_check(provider_package: str, class_name: str) -> bool:
     provider_path = provider_package[len("apache-") :].replace("-", ".")
     if not class_name.startswith(provider_path):
         log.warning(
-            "Sanity check failed when importing '%s' from '%s' package. It should start with '%s'",
+            "Coherence check failed when importing '%s' from '%s' package. It should start with '%s'",
             class_name,
             provider_package,
             provider_path,
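
The check's package-to-module mapping is simple string surgery; a self-contained sketch of just that step (function name is illustrative):

def provider_dotted_path(provider_package: str) -> str:
    """E.g. 'apache-airflow-providers-amazon' -> 'airflow.providers.amazon'."""
    return provider_package[len("apache-"):].replace("-", ".")

# The renamed coherence check then requires every class the provider
# advertises to start with this dotted path:
path = provider_dotted_path("apache-airflow-providers-amazon")
assert path == "airflow.providers.amazon"
assert "airflow.providers.amazon.aws.hooks.s3.S3Hook".startswith(path)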

airflow/utils/db.py

Lines changed: 1 addition & 1 deletion

@@ -231,7 +231,7 @@ def create_default_connections(session=None):
                     "InstanceCount": 1
                 },
                 {
-                    "Name": "Slave nodes",
+                    "Name": "Core nodes",
                     "Market": "ON_DEMAND",
                     "InstanceRole": "CORE",
                     "InstanceType": "r3.2xlarge",
