Skip to content

Commit 9c2bd8b

Browse files
Fix: resource quotas (#377)
* fix: adding a noxfile_config.py to snippets to balance load between different projects * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * disabling type hint enforcement in noxfile_config * changing the method of disabling type checking * linter on samples * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * changing transport logic for submit_job_to_cluster.py * updating transport method in list_clusters.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 6cde59e commit 9c2bd8b

File tree

4 files changed

+54
-36
lines changed

4 files changed

+54
-36
lines changed

dataproc/snippets/list_clusters.py

Lines changed: 3 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
import argparse
2020

2121
from google.cloud import dataproc_v1
22-
from google.cloud.dataproc_v1.gapic.transports import cluster_controller_grpc_transport
2322

2423

2524
# [START dataproc_list_clusters]
@@ -28,14 +27,7 @@ def list_clusters(dataproc, project, region):
2827
for cluster in dataproc.list_clusters(
2928
request={"project_id": project, "region": region}
3029
):
31-
print(
32-
(
33-
"{} - {}".format(
34-
cluster.cluster_name,
35-
cluster.status.state.name
36-
)
37-
)
38-
)
30+
print(("{} - {}".format(cluster.cluster_name, cluster.status.state.name)))
3931

4032

4133
# [END dataproc_list_clusters]
@@ -49,12 +41,9 @@ def main(project_id, region):
4941
else:
5042
# Use a regional gRPC endpoint. See:
5143
# https://cloud.google.com/dataproc/docs/concepts/regional-endpoints
52-
client_transport = (
53-
cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
54-
address="{}-dataproc.googleapis.com:443".format(region)
55-
)
44+
dataproc_cluster_client = dataproc_v1.ClusterControllerClient(
45+
client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
5646
)
57-
dataproc_cluster_client = dataproc_v1.ClusterControllerClient(client_transport)
5847

5948
list_clusters(dataproc_cluster_client, project_id, region)
6049

dataproc/snippets/noxfile_config.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
# Copyright 2022 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
# Default TEST_CONFIG_OVERRIDE for python repos.
16+
17+
# You can copy this file into your directory, then it will be imported from
18+
# the noxfile.py.
19+
20+
# The source of truth:
21+
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
22+
23+
TEST_CONFIG_OVERRIDE = {
24+
# You can opt out from the test for specific Python versions.
25+
"ignored_versions": ["2.7", "3.6"],
26+
# Old samples are opted out of enforcing Python type hints
27+
# All new samples should feature them
28+
# "enforce_type_hints": True,
29+
# An envvar key for determining the project id to use. Change it
30+
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
31+
# build specific Cloud project. You can also use your own string
32+
# to use your own Cloud project.
33+
# "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
34+
"gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
35+
# If you need to use a specific version of pip,
36+
# change pip_version_override to the string representation
37+
# of the version number, for example, "20.2.4"
38+
"pip_version_override": None,
39+
# A dictionary you want to inject into your test. Don't put any
40+
# secrets here. These values will override predefined values.
41+
"envs": {},
42+
}

dataproc/snippets/submit_job_to_cluster.py

Lines changed: 7 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,6 @@
3434

3535
from google.cloud import dataproc_v1
3636
from google.cloud import storage
37-
from google.cloud.dataproc_v1.gapic.transports import cluster_controller_grpc_transport
38-
from google.cloud.dataproc_v1.gapic.transports import job_controller_grpc_transport
3937

4038

4139
DEFAULT_FILENAME = "pyspark_sort.py"
@@ -77,10 +75,8 @@ def download_output(project, cluster_id, output_bucket, job_id):
7775
print("Downloading output file.")
7876
client = storage.Client(project=project)
7977
bucket = client.get_bucket(output_bucket)
80-
output_blob = (
81-
"google-cloud-dataproc-metainfo/{}/jobs/{}/driveroutput.000000000".format(
82-
cluster_id, job_id
83-
)
78+
output_blob = "google-cloud-dataproc-metainfo/{}/jobs/{}/driveroutput.000000000".format(
79+
cluster_id, job_id
8480
)
8581
return bucket.blob(output_blob).download_as_string()
8682

@@ -135,14 +131,7 @@ def list_clusters_with_details(dataproc, project, region):
135131
for cluster in dataproc.list_clusters(
136132
request={"project_id": project, "region": region}
137133
):
138-
print(
139-
(
140-
"{} - {}".format(
141-
cluster.cluster_name,
142-
cluster.status.state.name,
143-
)
144-
)
145-
)
134+
print(("{} - {}".format(cluster.cluster_name, cluster.status.state.name,)))
146135

147136

148137
# [END dataproc_list_clusters_with_detail]
@@ -232,16 +221,12 @@ def main(
232221
region = get_region_from_zone(zone)
233222
# Use a regional gRPC endpoint. See:
234223
# https://cloud.google.com/dataproc/docs/concepts/regional-endpoints
235-
client_transport = (
236-
cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
237-
address="{}-dataproc.googleapis.com:443".format(region)
238-
)
224+
dataproc_cluster_client = dataproc_v1.ClusterControllerClient(
225+
client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
239226
)
240-
job_transport = job_controller_grpc_transport.JobControllerGrpcTransport(
241-
address="{}-dataproc.googleapis.com:443".format(region)
227+
dataproc_job_client = dataproc_v1.JobControllerClient(
228+
client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
242229
)
243-
dataproc_cluster_client = dataproc_v1.ClusterControllerClient(client_transport)
244-
dataproc_job_client = dataproc_v1.JobControllerClient(job_transport)
245230
# [END dataproc_get_client]
246231

247232
try:

dataproc/snippets/update_cluster.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
#!/usr/bin/env python
2+
13
# Copyright 2021 Google LLC
24
#
35
# Licensed under the Apache License, Version 2.0 (the "License");

0 commit comments

Comments (0)