11#!/usr/bin/env python
22
3- # Copyright 2018 Google Inc. All Rights Reserved.
3+ # Copyright 2018 Google LLC
44#
55# Licensed under the Apache License, Version 2.0 (the "License");
66# you may not use this file except in compliance with the License.
2121https://cloud.google.com/natural-language/automl/docs/
2222"""
2323
24- # [START automl_natural_language_import]
2524import argparse
2625import os
2726
28- from google .cloud import automl_v1beta1 as automl
2927
30- # [END automl_natural_language_import]
28+ def create_dataset (project_id , compute_region , dataset_name , multilabel = False ):
29+ """Create a dataset."""
30+ # [START automl_natural_language_create_dataset]
31+ # TODO(developer): Uncomment and set the following variables
32+ # project_id = 'PROJECT_ID_HERE'
33+ # compute_region = 'COMPUTE_REGION_HERE'
34+ # dataset_name = 'DATASET_NAME_HERE'
35+ # multilabel = True for multilabel or False for multiclass
3136
37+ from google .cloud import automl_v1beta1 as automl
3238
33- # [START automl_natural_language_create_dataset]
34- def create_dataset (project_id , compute_region , dataset_name , multilabel = False ):
35- """Create a dataset.
36- Args:
37- project_id: Id of the project.
38- compute_region: Region name.
39- dataset_name: Name of the dataset.
40- multilabel: Type of the classification problem.
41- False - MULTICLASS, True - MULTILABEL.
42- Default is False.
43- """
4439 client = automl .AutoMlClient ()
4540
4641 # A resource that represents Google Cloud Platform location.
@@ -74,18 +69,19 @@ def create_dataset(project_id, compute_region, dataset_name, multilabel=False):
7469 print ("\t seconds: {}" .format (dataset .create_time .seconds ))
7570 print ("\t nanos: {}" .format (dataset .create_time .nanos ))
7671
77-
78- # [END automl_natural_language_create_dataset]
72+ # [END automl_natural_language_create_dataset]
7973
8074
81- # [START automl_natural_language_list_datasets]
8275def list_datasets (project_id , compute_region , filter_ ):
83- """List all datasets.
84- Args:
85- project_id: Id of the project.
86- compute_region: Region name.
87- filter_: Filter expression.
88- """
76+ """List all datasets."""
77+ # [START automl_natural_language_list_datasets]
78+ # TODO(developer): Uncomment and set the following variables
79+ # project_id = 'PROJECT_ID_HERE'
80+ # compute_region = 'COMPUTE_REGION_HERE'
81+ # filter_ = 'filter expression here'
82+
83+ from google .cloud import automl_v1beta1 as automl
84+
8985 client = automl .AutoMlClient ()
9086
9187 # A resource that represents Google Cloud Platform location.
@@ -107,18 +103,19 @@ def list_datasets(project_id, compute_region, filter_):
107103 print ("\t seconds: {}" .format (dataset .create_time .seconds ))
108104 print ("\t nanos: {}" .format (dataset .create_time .nanos ))
109105
106+ # [END automl_natural_language_list_datasets]
110107
111- # [END automl_natural_language_list_datasets]
112108
113-
114- # [START automl_natural_language_get_dataset]
115109def get_dataset (project_id , compute_region , dataset_id ):
116- """Get the dataset.
117- Args:
118- project_id: Id of the project.
119- compute_region: Region name.
120- dataset_id: Id of the dataset.
121- """
110+ """Get the dataset."""
111+ # [START automl_natural_language_get_dataset]
112+ # TODO(developer): Uncomment and set the following variables
113+ # project_id = 'PROJECT_ID_HERE'
114+ # compute_region = 'COMPUTE_REGION_HERE'
115+ # dataset_id = 'DATASET_ID_HERE'
116+
117+ from google .cloud import automl_v1beta1 as automl
118+
122119 client = automl .AutoMlClient ()
123120
124121 # Get the full path of the dataset
@@ -140,21 +137,20 @@ def get_dataset(project_id, compute_region, dataset_id):
140137 print ("\t seconds: {}" .format (dataset .create_time .seconds ))
141138 print ("\t nanos: {}" .format (dataset .create_time .nanos ))
142139
143-
144- # [END automl_natural_language_get_dataset]
140+ # [END automl_natural_language_get_dataset]
145141
146142
147- # [START automl_natural_language_import_data]
148143def import_data (project_id , compute_region , dataset_id , path ):
149- """Import labeled items.
150- Args:
151- project_id: Id of the project.
152- compute_region: Region name.
153- dataset_id: ID of the dataset into which the training content are to
154- be imported.
155- path: Google Cloud Storage URIs.
156- Target files must be in AutoML Natural Language CSV format.
157- """
144+ """Import labeled items."""
145+ # [START automl_natural_language_import_data]
146+ # TODO(developer): Uncomment and set the following variables
147+ # project_id = 'PROJECT_ID_HERE'
148+ # compute_region = 'COMPUTE_REGION_HERE'
149+ # dataset_id = 'DATASET_ID_HERE'
150+ # path = 'gs://path/to/file.csv'
151+
152+ from google .cloud import automl_v1beta1 as automl
153+
158154 client = automl .AutoMlClient ()
159155
160156 # Get the full path of the dataset.
@@ -173,19 +169,20 @@ def import_data(project_id, compute_region, dataset_id, path):
173169 # synchronous check of operation status.
174170 print ("Data imported. {}" .format (response .result ()))
175171
176-
177- # [END automl_natural_language_import_data]
172+ # [END automl_natural_language_import_data]
178173
179174
180- # [START automl_natural_language_export_data]
181175def export_data (project_id , compute_region , dataset_id , output_uri ):
182- """Export a dataset to a Google Cloud Storage bucket.
183- Args:
184- project_id: Id of the project.
185- compute_region: Region name.
186- dataset_id: Id of the dataset to which will be exported.
187- output_uri: Google Cloud Storage URI for the export directory.
188- """
176+ """Export a dataset to a Google Cloud Storage bucket."""
177+ # [START automl_natural_language_export_data]
178+ # TODO(developer): Uncomment and set the following variables
179+ # project_id = 'PROJECT_ID_HERE'
180+ # compute_region = 'COMPUTE_REGION_HERE'
181+ # dataset_id = 'DATASET_ID_HERE'
182+ # output_uri = 'gs://location/to/export/data'
183+
184+ from google .cloud import automl_v1beta1 as automl
185+
189186 client = automl .AutoMlClient ()
190187
191188 # Get the full path of the dataset.
@@ -203,18 +200,19 @@ def export_data(project_id, compute_region, dataset_id, output_uri):
203200 # synchronous check of operation status.
204201 print ("Data exported. {}" .format (response .result ()))
205202
203+ # [END automl_natural_language_export_data]
206204
207- # [END automl_natural_language_export_data]
208205
209-
210- # [START automl_natural_language_delete_dataset]
211206def delete_dataset (project_id , compute_region , dataset_id ):
212- """Delete a dataset.
213- Args:
214- project_id: Id of the project.
215- compute_region: Region name.
216- dataset_id: Id of the dataset.
217- """
207+ """Delete a dataset."""
208+ # [START automl_natural_language_delete_dataset]
209+ # TODO(developer): Uncomment and set the following variables
210+ # project_id = 'PROJECT_ID_HERE'
211+ # compute_region = 'COMPUTE_REGION_HERE'
212+ # dataset_id = 'DATASET_ID_HERE'
213+
214+ from google .cloud import automl_v1beta1 as automl
215+
218216 client = automl .AutoMlClient ()
219217
220218 # Get the full path of the dataset.
@@ -228,8 +226,7 @@ def delete_dataset(project_id, compute_region, dataset_id):
228226 # synchronous check of operation status.
229227 print ("Dataset deleted. {}" .format (response .result ()))
230228
231-
232- # [END automl_natural_language_delete_dataset]
229+ # [END automl_natural_language_delete_dataset]
233230
234231
235232if __name__ == "__main__" :
0 commit comments