@@ -79,23 +79,38 @@ def list_datasets(project_id, compute_region, filter_=None):
         print("Dataset id: {}".format(dataset.name.split("/")[-1]))
         print("Dataset display name: {}".format(dataset.display_name))
         metadata = dataset.tables_dataset_metadata
-        print("Dataset primary table spec id: {}".format(
-            metadata.primary_table_spec_id))
-        print("Dataset target column spec id: {}".format(
-            metadata.target_column_spec_id))
-        print("Dataset target column spec id: {}".format(
-            metadata.target_column_spec_id))
-        print("Dataset weight column spec id: {}".format(
-            metadata.weight_column_spec_id))
-        print("Dataset ml use column spec id: {}".format(
-            metadata.ml_use_column_spec_id))
+        print(
+            "Dataset primary table spec id: {}".format(
+                metadata.primary_table_spec_id
+            )
+        )
+        print(
+            "Dataset target column spec id: {}".format(
+                metadata.target_column_spec_id
+            )
+        )
+        print(
+            "Dataset target column spec id: {}".format(
+                metadata.target_column_spec_id
+            )
+        )
+        print(
+            "Dataset weight column spec id: {}".format(
+                metadata.weight_column_spec_id
+            )
+        )
+        print(
+            "Dataset ml use column spec id: {}".format(
+                metadata.ml_use_column_spec_id
+            )
+        )
         print("Dataset example count: {}".format(dataset.example_count))
         print("Dataset create time:")
         print("\tseconds: {}".format(dataset.create_time.seconds))
         print("\tnanos: {}".format(dataset.create_time.nanos))
         print("\n")
 
-    # [END automl_tables_list_datasets]
+        # [END automl_tables_list_datasets]
         result.append(dataset)
 
     return result
@@ -119,28 +134,31 @@ def list_table_specs(
 
     # List all the table specs in the dataset by applying filter.
     response = client.list_table_specs(
-        dataset_display_name=dataset_display_name, filter_=filter_)
+        dataset_display_name=dataset_display_name, filter_=filter_
+    )
 
     print("List of table specs:")
     for table_spec in response:
         # Display the table_spec information.
         print("Table spec name: {}".format(table_spec.name))
         print("Table spec id: {}".format(table_spec.name.split("/")[-1]))
-        print("Table spec time column spec id: {}".format(
-            table_spec.time_column_spec_id))
+        print(
+            "Table spec time column spec id: {}".format(
+                table_spec.time_column_spec_id
+            )
+        )
         print("Table spec row count: {}".format(table_spec.row_count))
         print("Table spec column count: {}".format(table_spec.column_count))
 
-    # [END automl_tables_list_specs]
+        # [END automl_tables_list_specs]
         result.append(table_spec)
 
     return result
 
 
-def list_column_specs(project_id,
-                      compute_region,
-                      dataset_display_name,
-                      filter_=None):
+def list_column_specs(
+    project_id, compute_region, dataset_display_name, filter_=None
+):
     """List all column specs."""
     result = []
     # [START automl_tables_list_column_specs]
@@ -156,7 +174,8 @@ def list_column_specs(project_id,
 
     # List all the table specs in the dataset by applying filter.
     response = client.list_column_specs(
-        dataset_display_name=dataset_display_name, filter_=filter_)
+        dataset_display_name=dataset_display_name, filter_=filter_
+    )
 
     print("List of column specs:")
     for column_spec in response:
@@ -166,7 +185,7 @@ def list_column_specs(project_id,
         print("Column spec display name: {}".format(column_spec.display_name))
         print("Column spec data type: {}".format(column_spec.data_type))
 
-    # [END automl_tables_list_column_specs]
+        # [END automl_tables_list_column_specs]
         result.append(column_spec)
 
     return result
@@ -227,19 +246,20 @@ def get_table_spec(project_id, compute_region, dataset_id, table_spec_id):
     # Display the table spec information.
     print("Table spec name: {}".format(table_spec.name))
     print("Table spec id: {}".format(table_spec.name.split("/")[-1]))
-    print("Table spec time column spec id: {}".format(
-        table_spec.time_column_spec_id))
+    print(
+        "Table spec time column spec id: {}".format(
+            table_spec.time_column_spec_id
+        )
+    )
     print("Table spec row count: {}".format(table_spec.row_count))
     print("Table spec column count: {}".format(table_spec.column_count))
 
     # [END automl_tables_get_table_spec]
 
 
-def get_column_spec(project_id,
-                    compute_region,
-                    dataset_id,
-                    table_spec_id,
-                    column_spec_id):
+def get_column_spec(
+    project_id, compute_region, dataset_id, table_spec_id, column_spec_id
+):
     """Get the column spec."""
     # [START automl_tables_get_column_spec]
     # TODO(developer): Uncomment and set the following variables
@@ -288,7 +308,7 @@ def import_data(project_id, compute_region, dataset_display_name, path):
     client = automl.TablesClient(project=project_id, region=compute_region)
 
     response = None
-    if path.startswith('bq'):
+    if path.startswith("bq"):
         response = client.import_data(
             dataset_display_name=dataset_display_name, bigquery_input_uri=path
         )
@@ -297,7 +317,7 @@ def import_data(project_id, compute_region, dataset_display_name, path):
         input_uris = path.split(",")
         response = client.import_data(
             dataset_display_name=dataset_display_name,
-            gcs_input_uris=input_uris
+            gcs_input_uris=input_uris,
         )
 
     print("Processing import...")
@@ -321,8 +341,10 @@ def export_data(project_id, compute_region, dataset_display_name, gcs_uri):
     client = automl.TablesClient(project=project_id, region=compute_region)
 
     # Export the dataset to the output URI.
-    response = client.export_data(dataset_display_name=dataset_display_name,
-                                  gcs_output_uri_prefix=gcs_uri)
+    response = client.export_data(
+        dataset_display_name=dataset_display_name,
+        gcs_output_uri_prefix=gcs_uri,
+    )
 
     print("Processing export...")
     # synchronous check of operation status.
@@ -331,12 +353,14 @@ def export_data(project_id, compute_region, dataset_display_name, gcs_uri):
     # [END automl_tables_export_data]
 
 
-def update_dataset(project_id,
-                   compute_region,
-                   dataset_display_name,
-                   target_column_spec_name=None,
-                   weight_column_spec_name=None,
-                   test_train_column_spec_name=None):
+def update_dataset(
+    project_id,
+    compute_region,
+    dataset_display_name,
+    target_column_spec_name=None,
+    weight_column_spec_name=None,
+    test_train_column_spec_name=None,
+):
     """Update dataset."""
     # [START automl_tables_update_dataset]
     # TODO(developer): Uncomment and set the following variables
@@ -354,29 +378,31 @@ def update_dataset(project_id,
     if target_column_spec_name is not None:
         response = client.set_target_column(
             dataset_display_name=dataset_display_name,
-            column_spec_display_name=target_column_spec_name
+            column_spec_display_name=target_column_spec_name,
         )
         print("Target column updated. {}".format(response))
     if weight_column_spec_name is not None:
         response = client.set_weight_column(
             dataset_display_name=dataset_display_name,
-            column_spec_display_name=weight_column_spec_name
+            column_spec_display_name=weight_column_spec_name,
         )
         print("Weight column updated. {}".format(response))
     if test_train_column_spec_name is not None:
         response = client.set_test_train_column(
             dataset_display_name=dataset_display_name,
-            column_spec_display_name=test_train_column_spec_name
+            column_spec_display_name=test_train_column_spec_name,
         )
         print("Test/train column updated. {}".format(response))
 
     # [END automl_tables_update_dataset]
 
 
-def update_table_spec(project_id,
-                      compute_region,
-                      dataset_display_name,
-                      time_column_spec_display_name):
+def update_table_spec(
+    project_id,
+    compute_region,
+    dataset_display_name,
+    time_column_spec_display_name,
+):
     """Update table spec."""
     # [START automl_tables_update_table_spec]
     # TODO(developer): Uncomment and set the following variables
391
417
392
418
response = client .set_time_column (
393
419
dataset_display_name = dataset_display_name ,
394
- column_spec_display_name = time_column_spec_display_name
420
+ column_spec_display_name = time_column_spec_display_name ,
395
421
)
396
422
397
423
# synchronous check of operation status.
398
424
print ("Table spec updated. {}" .format (response ))
399
425
# [END automl_tables_update_table_spec]
400
426
401
427
402
- def update_column_spec (project_id ,
403
- compute_region ,
404
- dataset_display_name ,
405
- column_spec_display_name ,
406
- type_code ,
407
- nullable = None ):
428
+ def update_column_spec (
429
+ project_id ,
430
+ compute_region ,
431
+ dataset_display_name ,
432
+ column_spec_display_name ,
433
+ type_code ,
434
+ nullable = None ,
435
+ ):
408
436
"""Update column spec."""
409
437
# [START automl_tables_update_column_spec]
410
438
# TODO(developer): Uncomment and set the following variables
@@ -423,7 +451,8 @@ def update_column_spec(project_id,
     response = client.update_column_spec(
         dataset_display_name=dataset_display_name,
         column_spec_display_name=column_spec_display_name,
-        type_code=type_code, nullable=nullable
+        type_code=type_code,
+        nullable=nullable,
     )
 
     # synchronous check of operation status.
@@ -546,56 +575,62 @@ def delete_dataset(project_id, compute_region, dataset_display_name):
     if args.command == "list_datasets":
         list_datasets(project_id, compute_region, args.filter_)
     if args.command == "list_table_specs":
-        list_table_specs(project_id,
-                         compute_region,
-                         args.dataset_display_name,
-                         args.filter_)
+        list_table_specs(
+            project_id, compute_region, args.dataset_display_name, args.filter_
+        )
     if args.command == "list_column_specs":
-        list_column_specs(project_id,
-                          compute_region,
-                          args.dataset_display_name,
-                          args.filter_)
+        list_column_specs(
+            project_id, compute_region, args.dataset_display_name, args.filter_
+        )
     if args.command == "get_dataset":
         get_dataset(project_id, compute_region, args.dataset_display_name)
     if args.command == "get_table_spec":
-        get_table_spec(project_id,
-                       compute_region,
-                       args.dataset_display_name,
-                       args.table_spec_id)
+        get_table_spec(
+            project_id,
+            compute_region,
+            args.dataset_display_name,
+            args.table_spec_id,
+        )
     if args.command == "get_column_spec":
-        get_column_spec(project_id,
-                        compute_region,
-                        args.dataset_display_name,
-                        args.table_spec_id,
-                        args.column_spec_id)
+        get_column_spec(
+            project_id,
+            compute_region,
+            args.dataset_display_name,
+            args.table_spec_id,
+            args.column_spec_id,
+        )
     if args.command == "import_data":
-        import_data(project_id,
-                    compute_region,
-                    args.dataset_display_name,
-                    args.path)
+        import_data(
+            project_id, compute_region, args.dataset_display_name, args.path
+        )
     if args.command == "export_data":
-        export_data(project_id,
-                    compute_region,
-                    args.dataset_display_name,
-                    args.gcs_uri)
+        export_data(
+            project_id, compute_region, args.dataset_display_name, args.gcs_uri
+        )
     if args.command == "update_dataset":
-        update_dataset(project_id,
-                       compute_region,
-                       args.dataset_display_name,
-                       args.target_column_spec_name,
-                       args.weight_column_spec_name,
-                       args.ml_use_column_spec_name)
+        update_dataset(
+            project_id,
+            compute_region,
+            args.dataset_display_name,
+            args.target_column_spec_name,
+            args.weight_column_spec_name,
+            args.ml_use_column_spec_name,
+        )
     if args.command == "update_table_spec":
-        update_table_spec(project_id,
-                          compute_region,
-                          args.dataset_display_name,
-                          args.time_column_spec_display_name)
+        update_table_spec(
+            project_id,
+            compute_region,
+            args.dataset_display_name,
+            args.time_column_spec_display_name,
+        )
     if args.command == "update_column_spec":
-        update_column_spec(project_id,
-                           compute_region,
-                           args.dataset_display_name,
-                           args.column_spec_display_name,
-                           args.type_code,
-                           args.nullable)
+        update_column_spec(
+            project_id,
+            compute_region,
+            args.dataset_display_name,
+            args.column_spec_display_name,
+            args.type_code,
+            args.nullable,
+        )
     if args.command == "delete_dataset":
         delete_dataset(project_id, compute_region, args.dataset_display_name)
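For anyone trying the reformatted samples directly, here is a minimal driver sketch outside the diff. It is an illustration, not part of the change: the `automl_v1beta1` import alias and the project/region values are placeholders, and `client.list_datasets(filter_=...)` is inferred from the wrapper's signature rather than shown in the hunks, while `automl.TablesClient(project=..., region=...)` and the printed fields mirror calls visible above.

# Hypothetical driver for the sample helpers above; not part of the diff.
from google.cloud import automl_v1beta1 as automl  # assumed import alias

project_id = "my-project"       # placeholder GCP project
compute_region = "us-central1"  # placeholder AutoML Tables region

# Client construction matches the samples in the diff.
client = automl.TablesClient(project=project_id, region=compute_region)

# Same pattern as the list_datasets() helper: iterate and print key fields.
for dataset in client.list_datasets(filter_=None):
    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
    print("Dataset display name: {}".format(dataset.display_name))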