Skip to content

Commit 816fbb4

Browse files
authored
test(samples): use try/finally for clusters and use pytest-xdist (#360)
1 parent c3ac64a commit 816fbb4

File tree

5 files changed

+77
-53
lines changed

5 files changed

+77
-53
lines changed

dataproc/snippets/create_cluster_test.py

Lines changed: 13 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import os
1616
import uuid
1717

18+
from google.api_core.exceptions import NotFound
1819
from google.cloud import dataproc_v1 as dataproc
1920
import pytest
2021

@@ -34,15 +35,18 @@ def teardown():
3435
client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"}
3536
)
3637
# Client library function
37-
operation = cluster_client.delete_cluster(
38-
request={
39-
"project_id": PROJECT_ID,
40-
"region": REGION,
41-
"cluster_name": CLUSTER_NAME,
42-
}
43-
)
44-
# Wait for cluster to delete
45-
operation.result()
38+
try:
39+
operation = cluster_client.delete_cluster(
40+
request={
41+
"project_id": PROJECT_ID,
42+
"region": REGION,
43+
"cluster_name": CLUSTER_NAME,
44+
}
45+
)
46+
# Wait for cluster to delete
47+
operation.result()
48+
except NotFound:
49+
print("Cluster already deleted")
4650

4751

4852
def test_cluster_create(capsys):

dataproc/snippets/quickstart/quickstart_test.py

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -39,12 +39,10 @@
3939

4040

4141
@pytest.fixture(autouse=True)
42-
def setup_teardown():
42+
def blob():
4343
storage_client = storage.Client()
4444

45-
@backoff.on_exception(backoff.expo,
46-
ServiceUnavailable,
47-
max_tries=5)
45+
@backoff.on_exception(backoff.expo, ServiceUnavailable, max_tries=5)
4846
def create_bucket():
4947
return storage_client.create_bucket(STAGING_BUCKET)
5048

@@ -54,12 +52,20 @@ def create_bucket():
5452

5553
yield
5654

55+
blob.delete()
56+
bucket.delete()
57+
58+
59+
@pytest.fixture(autouse=True)
60+
def cluster():
61+
yield
62+
63+
# The quickstart sample deletes the cluster, but if the test fails
64+
# before cluster deletion occurs, it can be manually deleted here.
5765
cluster_client = dataproc.ClusterControllerClient(
5866
client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)}
5967
)
6068

61-
# The quickstart sample deletes the cluster, but if the test fails
62-
# before cluster deletion occurs, it can be manually deleted here.
6369
clusters = cluster_client.list_clusters(
6470
request={"project_id": PROJECT_ID, "region": REGION}
6571
)
@@ -74,9 +80,6 @@ def create_bucket():
7480
}
7581
)
7682

77-
blob.delete()
78-
bucket.delete()
79-
8083

8184
def test_quickstart(capsys):
8285
quickstart.quickstart(PROJECT_ID, REGION, CLUSTER_NAME, JOB_FILE_PATH)
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
11
pytest==7.0.1
2+
pytest-xdist==2.5.0

dataproc/snippets/submit_job_test.py

Lines changed: 26 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import os
1616
import uuid
1717

18+
from google.api_core.exceptions import NotFound
1819
from google.cloud import dataproc_v1 as dataproc
1920
import pytest
2021

@@ -36,25 +37,34 @@
3637

3738
@pytest.fixture(autouse=True)
3839
def setup_teardown():
39-
cluster_client = dataproc.ClusterControllerClient(
40-
client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)}
41-
)
40+
try:
41+
cluster_client = dataproc.ClusterControllerClient(
42+
client_options={
43+
"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)
44+
}
45+
)
4246

43-
# Create the cluster.
44-
operation = cluster_client.create_cluster(
45-
request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
46-
)
47-
operation.result()
47+
# Create the cluster.
48+
operation = cluster_client.create_cluster(
49+
request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
50+
)
51+
operation.result()
4852

49-
yield
53+
yield
5054

51-
cluster_client.delete_cluster(
52-
request={
53-
"project_id": PROJECT_ID,
54-
"region": REGION,
55-
"cluster_name": CLUSTER_NAME,
56-
}
57-
)
55+
finally:
56+
try:
57+
operation = cluster_client.delete_cluster(
58+
request={
59+
"project_id": PROJECT_ID,
60+
"region": REGION,
61+
"cluster_name": CLUSTER_NAME,
62+
}
63+
)
64+
operation.result()
65+
66+
except NotFound:
67+
print("Cluster already deleted")
5868

5969

6070
def test_submit_job(capsys):

dataproc/snippets/update_cluster_test.py

Lines changed: 25 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import os
2020
import uuid
2121

22+
from google.api_core.exceptions import NotFound
2223
from google.cloud.dataproc_v1.services.cluster_controller.client import (
2324
ClusterControllerClient,
2425
)
@@ -41,25 +42,6 @@
4142
}
4243

4344

44-
@pytest.fixture(autouse=True)
45-
def setup_teardown(cluster_client):
46-
# Create the cluster.
47-
operation = cluster_client.create_cluster(
48-
request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
49-
)
50-
operation.result()
51-
52-
yield
53-
54-
cluster_client.delete_cluster(
55-
request={
56-
"project_id": PROJECT_ID,
57-
"region": REGION,
58-
"cluster_name": CLUSTER_NAME,
59-
}
60-
)
61-
62-
6345
@pytest.fixture
6446
def cluster_client():
6547
cluster_client = ClusterControllerClient(
@@ -68,6 +50,30 @@ def cluster_client():
6850
return cluster_client
6951

7052

53+
@pytest.fixture(autouse=True)
54+
def setup_teardown(cluster_client):
55+
try:
56+
# Create the cluster.
57+
operation = cluster_client.create_cluster(
58+
request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
59+
)
60+
operation.result()
61+
62+
yield
63+
finally:
64+
try:
65+
operation = cluster_client.delete_cluster(
66+
request={
67+
"project_id": PROJECT_ID,
68+
"region": REGION,
69+
"cluster_name": CLUSTER_NAME,
70+
}
71+
)
72+
operation.result()
73+
except NotFound:
74+
print("Cluster already deleted")
75+
76+
7177
def test_update_cluster(capsys, cluster_client: ClusterControllerClient):
7278
# Wrapper function for client library function
7379
update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES)

0 commit comments

Comments (0)