@@ -35,15 +35,7 @@ def test_dataset_create_import_delete(capsys):
     out, _ = capsys.readouterr()
     create_dataset_output = out.splitlines()
     assert "Dataset id: " in create_dataset_output[1]
-
-    # import data
     dataset_id = create_dataset_output[1].split()[2]
-    data = "gs://{}-lcm/happiness.csv".format(project_id)
-    automl_natural_language_dataset.import_data(
-        project_id, compute_region, dataset_id, data
-    )
-    out, _ = capsys.readouterr()
-    assert "Data imported." in out
 
     # delete dataset
     automl_natural_language_dataset.delete_dataset(
@@ -53,6 +45,28 @@ def test_dataset_create_import_delete(capsys):
     assert "Dataset deleted." in out
 
 
+def test_import_data(capsys):
+    # Importing a dataset can take a long time, and only four operations can
+    # run on a dataset at once, so try to import into a nonexistent dataset
+    # and confirm that the dataset is reported as not found while the other
+    # elements of the request are valid.
+    try:
+        data = "gs://{}-lcm/happiness.csv".format(project_id)
+        automl_natural_language_dataset.import_data(
+            project_id, compute_region, "TEN0000000000000000000", data
+        )
+        out, _ = capsys.readouterr()
+        assert (
+            "Dataset doesn't exist or is inaccessible for use with AutoMl."
+            in out
+        )
+    except Exception as e:
+        assert (
+            "Dataset doesn't exist or is inaccessible for use with AutoMl."
+            in e.message
+        )
+
+
 def test_dataset_list_get(capsys):
     # list datasets
     automl_natural_language_dataset.list_datasets(