diff --git a/gcloud/bigquery/__init__.py b/gcloud/bigquery/__init__.py
index 5b8efe9b4499..30207de37489 100644
--- a/gcloud/bigquery/__init__.py
+++ b/gcloud/bigquery/__init__.py
@@ -24,3 +24,5 @@
 from gcloud.bigquery.client import Client
 from gcloud.bigquery.connection import SCOPE
 from gcloud.bigquery.dataset import Dataset
+from gcloud.bigquery.table import SchemaField
+from gcloud.bigquery.table import Table
diff --git a/gcloud/bigquery/client.py b/gcloud/bigquery/client.py
index 0079e8c5142b..891e773a1f0b 100644
--- a/gcloud/bigquery/client.py
+++ b/gcloud/bigquery/client.py
@@ -43,6 +43,49 @@ class Client(JSONClient):
 
     _connection_class = Connection
 
+    def list_datasets(self, include_all=False, max_results=None,
+                      page_token=None):
+        """List datasets for the project associated with this client.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/list
+
+        :type include_all: boolean
+        :param include_all: True if results include hidden datasets.
+
+        :type max_results: int
+        :param max_results: maximum number of datasets to return. If not
+                            passed, defaults to a value set by the API.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of datasets. If
+                           not passed, the API will return the first page of
+                           datasets.
+
+        :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.bigquery.dataset.Dataset`, plus a
+                  "next page token" string: if the token is not None,
+                  indicates that more datasets can be retrieved with another
+                  call (pass that value as ``page_token``).
+        """
+        params = {}
+
+        if include_all:
+            params['all'] = True
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/datasets' % (self.project,)
+        resp = self.connection.api_request(method='GET', path=path,
+                                           query_params=params)
+        datasets = [Dataset.from_api_repr(resource, self)
+                    for resource in resp['datasets']]
+        return datasets, resp.get('nextPageToken')
+
     def dataset(self, name):
         """Construct a dataset bound to this client.
 
diff --git a/gcloud/bigquery/dataset.py b/gcloud/bigquery/dataset.py
index c725b8200462..db5cfb70f216 100644
--- a/gcloud/bigquery/dataset.py
+++ b/gcloud/bigquery/dataset.py
@@ -190,6 +190,29 @@ def location(self, value):
             raise ValueError("Pass a string, or None")
         self._properties['location'] = value
 
+    @classmethod
+    def from_api_repr(cls, resource, client):
+        """Factory: construct a dataset given its API representation
+
+        :type resource: dict
+        :param resource: dataset resource representation returned from the API
+
+        :type client: :class:`gcloud.bigquery.client.Client`
+        :param client: Client which holds credentials and project
+                       configuration for the dataset.
+
+        :rtype: :class:`gcloud.bigquery.dataset.Dataset`
+        :returns: Dataset parsed from ``resource``.
+        """
+        if ('datasetReference' not in resource or
+                'datasetId' not in resource['datasetReference']):
+            raise KeyError('Resource lacks required identity information:'
+                           '["datasetReference"]["datasetId"]')
+        name = resource['datasetReference']['datasetId']
+        dataset = cls(name, client=client)
+        dataset._set_properties(resource)
+        return dataset
+
     def _require_client(self, client):
         """Check client or verify over-ride.
 
@@ -357,6 +380,43 @@ def delete(self, client=None):
         client = self._require_client(client)
         client.connection.api_request(method='DELETE', path=self.path)
 
+    def list_tables(self, max_results=None, page_token=None):
+        """List tables for the dataset.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/tables/list
+
+        :type max_results: int
+        :param max_results: maximum number of tables to return. If not
+                            passed, defaults to a value set by the API.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of tables. If
+                           not passed, the API will return the first page of
+                           tables.
+
+        :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.bigquery.table.Table`, plus a
+                  "next page token" string: if not None, indicates that
+                  more tables can be retrieved with another call (pass that
+                  value as ``page_token``).
+        """
+        params = {}
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/datasets/%s/tables' % (self.project, self.name)
+        connection = self._client.connection
+        resp = connection.api_request(method='GET', path=path,
+                                      query_params=params)
+        tables = [Table.from_api_repr(resource, self)
+                  for resource in resp['tables']]
+        return tables, resp.get('nextPageToken')
+
     def table(self, name, schema=()):
         """Construct a table bound to this dataset.
 
diff --git a/gcloud/bigquery/table.py b/gcloud/bigquery/table.py
index 67ac13e09a78..ee2d40cb8a2a 100644
--- a/gcloud/bigquery/table.py
+++ b/gcloud/bigquery/table.py
@@ -298,6 +298,28 @@ def view_query(self):
         """Delete SQL query defining the table as a view."""
         self._properties.pop('view', None)
 
+    @classmethod
+    def from_api_repr(cls, resource, dataset):
+        """Factory: construct a table given its API representation
+
+        :type resource: dict
+        :param resource: table resource representation returned from the API
+
+        :type dataset: :class:`gcloud.bigquery.dataset.Dataset`
+        :param dataset: The dataset containing the table.
+
+        :rtype: :class:`gcloud.bigquery.table.Table`
+        :returns: Table parsed from ``resource``.
+        """
+        if ('tableReference' not in resource or
+                'tableId' not in resource['tableReference']):
+            raise KeyError('Resource lacks required identity information:'
+                           '["tableReference"]["tableId"]')
+        table_name = resource['tableReference']['tableId']
+        table = cls(table_name, dataset=dataset)
+        table._set_properties(resource)
+        return table
+
     def _require_client(self, client):
         """Check client or verify over-ride.
@@ -344,7 +366,7 @@ def _set_properties(self, api_response):
         """
         self._properties.clear()
         cleaned = api_response.copy()
-        schema = cleaned.pop('schema', {})
+        schema = cleaned.pop('schema', {'fields': ()})
         self.schema = self._parse_schema_resource(schema)
         if 'creationTime' in cleaned:
             cleaned['creationTime'] = float(cleaned['creationTime'])
diff --git a/gcloud/bigquery/test_client.py b/gcloud/bigquery/test_client.py
index b9d547fd359a..554314fa9515 100644
--- a/gcloud/bigquery/test_client.py
+++ b/gcloud/bigquery/test_client.py
@@ -46,6 +46,88 @@ def test_dataset(self):
         self.assertEqual(dataset.name, DATASET)
         self.assertTrue(dataset._client is client)
 
+    def test_list_datasets_defaults(self):
+        from gcloud.bigquery.dataset import Dataset
+        PROJECT = 'PROJECT'
+        DATASET_1 = 'dataset_one'
+        DATASET_2 = 'dataset_two'
+        PATH = 'projects/%s/datasets' % PROJECT
+        TOKEN = 'TOKEN'
+        DATA = {
+            'nextPageToken': TOKEN,
+            'datasets': [
+                {'kind': 'bigquery#dataset',
+                 'id': '%s:%s' % (PROJECT, DATASET_1),
+                 'datasetReference': {'datasetId': DATASET_1,
+                                      'projectId': PROJECT},
+                 'friendlyName': None},
+                {'kind': 'bigquery#dataset',
+                 'id': '%s:%s' % (PROJECT, DATASET_2),
+                 'datasetReference': {'datasetId': DATASET_2,
+                                      'projectId': PROJECT},
+                 'friendlyName': 'Two'},
+            ]
+        }
+        creds = _Credentials()
+        client = self._makeOne(PROJECT, creds)
+        conn = client.connection = _Connection(DATA)
+
+        datasets, token = client.list_datasets()
+
+        self.assertEqual(len(datasets), len(DATA['datasets']))
+        for found, expected in zip(datasets, DATA['datasets']):
+            self.assertTrue(isinstance(found, Dataset))
+            self.assertEqual(found.dataset_id, expected['id'])
+            self.assertEqual(found.friendly_name, expected['friendlyName'])
+        self.assertEqual(token, TOKEN)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'GET')
+        self.assertEqual(req['path'], '/%s' % PATH)
+
+    def test_list_datasets_explicit(self):
+        from gcloud.bigquery.dataset import Dataset
+        PROJECT = 'PROJECT'
+        DATASET_1 = 'dataset_one'
+        DATASET_2 = 'dataset_two'
+        PATH = 'projects/%s/datasets' % PROJECT
+        TOKEN = 'TOKEN'
+        DATA = {
+            'datasets': [
+                {'kind': 'bigquery#dataset',
+                 'id': '%s:%s' % (PROJECT, DATASET_1),
+                 'datasetReference': {'datasetId': DATASET_1,
+                                      'projectId': PROJECT},
+                 'friendlyName': None},
+                {'kind': 'bigquery#dataset',
+                 'id': '%s:%s' % (PROJECT, DATASET_2),
+                 'datasetReference': {'datasetId': DATASET_2,
+                                      'projectId': PROJECT},
+                 'friendlyName': 'Two'},
+            ]
+        }
+        creds = _Credentials()
+        client = self._makeOne(PROJECT, creds)
+        conn = client.connection = _Connection(DATA)
+
+        datasets, token = client.list_datasets(
+            include_all=True, max_results=3, page_token=TOKEN)
+
+        self.assertEqual(len(datasets), len(DATA['datasets']))
+        for found, expected in zip(datasets, DATA['datasets']):
+            self.assertTrue(isinstance(found, Dataset))
+            self.assertEqual(found.dataset_id, expected['id'])
+            self.assertEqual(found.friendly_name, expected['friendlyName'])
+        self.assertEqual(token, None)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'GET')
+        self.assertEqual(req['path'], '/%s' % PATH)
+        self.assertEqual(req['query_params'],
+                         {'all': True, 'maxResults': 3, 'pageToken': TOKEN})
+
 
 class _Credentials(object):
@@ -58,3 +140,15 @@ def create_scoped_required():
     def create_scoped(self, scope):
         self._scopes = scope
         return self
+
+
+class _Connection(object):
+
+    def __init__(self, *responses):
+        self._responses = responses
+        self._requested = []
+
+    def api_request(self, **kw):
+        self._requested.append(kw)
+        response, self._responses = self._responses[0], self._responses[1:]
+        return response
diff --git a/gcloud/bigquery/test_dataset.py b/gcloud/bigquery/test_dataset.py
index 1ae3699c2e57..71562c762d92 100644
--- a/gcloud/bigquery/test_dataset.py
+++ b/gcloud/bigquery/test_dataset.py
@@ -26,7 +26,7 @@ def _getTargetClass(self):
     def _makeOne(self, *args, **kw):
         return self._getTargetClass()(*args, **kw)
 
-    def _makeResource(self):
+    def _setUpConstants(self):
         import datetime
         import pytz
         self.WHEN_TS = 1437767599.006
@@ -35,11 +35,14 @@ def _makeResource(self):
         self.ETAG = 'ETAG'
         self.DS_ID = '%s:%s' % (self.PROJECT, self.DS_NAME)
         self.RESOURCE_URL = 'http://example.com/path/to/resource'
+
+    def _makeResource(self):
+        self._setUpConstants()
         return {
             'creationTime': self.WHEN_TS * 1000,
             'datasetReference': {'projectId': self.PROJECT,
                                  'datasetId': self.DS_NAME},
-            'etag': 'ETAG',
+            'etag': self.ETAG,
             'id': self.DS_ID,
             'lastModifiedTime': self.WHEN_TS * 1000,
             'location': 'US',
@@ -47,11 +50,23 @@ def _makeResource(self):
         }
 
     def _verifyResourceProperties(self, dataset, resource):
-        self.assertEqual(dataset.created, self.WHEN)
         self.assertEqual(dataset.dataset_id, self.DS_ID)
-        self.assertEqual(dataset.etag, self.ETAG)
-        self.assertEqual(dataset.modified, self.WHEN)
-        self.assertEqual(dataset.self_link, self.RESOURCE_URL)
+        if 'creationTime' in resource:
+            self.assertEqual(dataset.created, self.WHEN)
+        else:
+            self.assertEqual(dataset.created, None)
+        if 'etag' in resource:
+            self.assertEqual(dataset.etag, self.ETAG)
+        else:
+            self.assertEqual(dataset.etag, None)
+        if 'lastModifiedTime' in resource:
+            self.assertEqual(dataset.modified, self.WHEN)
+        else:
+            self.assertEqual(dataset.modified, None)
+        if 'selfLink' in resource:
+            self.assertEqual(dataset.self_link, self.RESOURCE_URL)
+        else:
+            self.assertEqual(dataset.self_link, None)
 
         self.assertEqual(dataset.default_table_expiration_ms,
                          resource.get('defaultTableExpirationMs'))
@@ -128,12 +143,43 @@ def test_location_setter(self):
         dataset.location = 'LOCATION'
         self.assertEqual(dataset.location, 'LOCATION')
 
+    def test_from_api_repr_missing_identity(self):
+        self._setUpConstants()
+        client = _Client(self.PROJECT)
+        RESOURCE = {}
+        klass = self._getTargetClass()
+        with self.assertRaises(KeyError):
+            klass.from_api_repr(RESOURCE, client=client)
+
+    def test_from_api_repr_bare(self):
+        self._setUpConstants()
+        client = _Client(self.PROJECT)
+        RESOURCE = {
+            'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
+            'datasetReference': {
+                'projectId': self.PROJECT,
+                'datasetId': self.DS_NAME,
+            }
+        }
+        klass = self._getTargetClass()
+        dataset = klass.from_api_repr(RESOURCE, client=client)
+        self.assertTrue(dataset._client is client)
+        self._verifyResourceProperties(dataset, RESOURCE)
+
+    def test_from_api_repr_w_properties(self):
+        client = _Client(self.PROJECT)
+        RESOURCE = self._makeResource()
+        klass = self._getTargetClass()
+        dataset = klass.from_api_repr(RESOURCE, client=client)
+        self.assertTrue(dataset._client is client)
+        self._verifyResourceProperties(dataset, RESOURCE)
+
     def test_create_w_bound_client(self):
         PATH = 'projects/%s/datasets' % self.PROJECT
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         dataset.create()
@@ -188,8 +234,8 @@ def test_create_w_missing_output_properties(self):
         del RESOURCE['lastModifiedTime']
         self.WHEN = None
         conn = _Connection(RESOURCE)
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         dataset.create()
 
@@ -207,8 +253,8 @@ def test_create_w_missing_output_properties(self):
     def test_exists_miss_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
         conn = _Connection()
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         self.assertFalse(dataset.exists())
 
@@ -239,8 +285,8 @@ def test_reload_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         dataset.reload()
 
@@ -271,8 +317,8 @@ def test_reload_w_alternate_client(self):
     def test_patch_w_invalid_expiration(self):
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         with self.assertRaises(ValueError):
             dataset.patch(default_table_expiration_ms='BOGUS')
@@ -285,8 +331,8 @@ def test_patch_w_bound_client(self):
         RESOURCE['description'] = DESCRIPTION
         RESOURCE['friendlyName'] = TITLE
         conn = _Connection(RESOURCE)
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         dataset.patch(description=DESCRIPTION, friendly_name=TITLE)
 
@@ -338,8 +384,8 @@ def test_update_w_bound_client(self):
         RESOURCE['description'] = DESCRIPTION
         RESOURCE['friendlyName'] = TITLE
         conn = _Connection(RESOURCE)
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
         dataset.description = DESCRIPTION
         dataset.friendly_name = TITLE
 
@@ -392,8 +438,8 @@ def test_update_w_alternate_client(self):
     def test_delete_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
         conn = _Connection({})
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
 
         dataset.delete()
 
@@ -418,11 +464,98 @@ def test_delete_w_alternate_client(self):
         self.assertEqual(req['method'], 'DELETE')
         self.assertEqual(req['path'], '/%s' % PATH)
 
+    def test_list_tables_defaults(self):
+        from gcloud.bigquery.table import Table
+        conn = _Connection({})
+        TABLE_1 = 'table_one'
+        TABLE_2 = 'table_two'
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        TOKEN = 'TOKEN'
+        DATA = {
+            'nextPageToken': TOKEN,
+            'tables': [
+                {'kind': 'bigquery#table',
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1),
+                 'tableReference': {'tableId': TABLE_1,
+                                    'datasetId': self.DS_NAME,
+                                    'projectId': self.PROJECT},
+                 'type': 'TABLE'},
+                {'kind': 'bigquery#table',
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2),
+                 'tableReference': {'tableId': TABLE_2,
+                                    'datasetId': self.DS_NAME,
+                                    'projectId': self.PROJECT},
+                 'type': 'TABLE'},
+            ]
+        }
+
+        conn = _Connection(DATA)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
+
+        tables, token = dataset.list_tables()
+
+        self.assertEqual(len(tables), len(DATA['tables']))
+        for found, expected in zip(tables, DATA['tables']):
+            self.assertTrue(isinstance(found, Table))
+            self.assertEqual(found.table_id, expected['id'])
+            self.assertEqual(found.table_type, expected['type'])
+        self.assertEqual(token, TOKEN)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'GET')
+        self.assertEqual(req['path'], '/%s' % PATH)
+
+    def test_list_tables_explicit(self):
+        from gcloud.bigquery.table import Table
+        conn = _Connection({})
+        TABLE_1 = 'table_one'
+        TABLE_2 = 'table_two'
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        TOKEN = 'TOKEN'
+        DATA = {
+            'tables': [
+                {'kind': 'bigquery#table',
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1),
+                 'tableReference': {'tableId': TABLE_1,
+                                    'datasetId': self.DS_NAME,
+                                    'projectId': self.PROJECT},
+                 'type': 'TABLE'},
+                {'kind': 'bigquery#table',
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2),
+                 'tableReference': {'tableId': TABLE_2,
+                                    'datasetId': self.DS_NAME,
+                                    'projectId': self.PROJECT},
+                 'type': 'TABLE'},
+            ]
+        }
+
+        conn = _Connection(DATA)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
+
+        tables, token = dataset.list_tables(max_results=3, page_token=TOKEN)
+
+        self.assertEqual(len(tables), len(DATA['tables']))
+        for found, expected in zip(tables, DATA['tables']):
+            self.assertTrue(isinstance(found, Table))
+            self.assertEqual(found.table_id, expected['id'])
+            self.assertEqual(found.table_type, expected['type'])
+        self.assertEqual(token, None)
+
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'GET')
+        self.assertEqual(req['path'], '/%s' % PATH)
+        self.assertEqual(req['query_params'],
+                         {'maxResults': 3, 'pageToken': TOKEN})
+
     def test_table_wo_schema(self):
         from gcloud.bigquery.table import Table
         conn = _Connection({})
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
         table = dataset.table('table_name')
         self.assertTrue(isinstance(table, Table))
         self.assertEqual(table.name, 'table_name')
@@ -433,8 +566,8 @@ def test_table_w_schema(self):
         from gcloud.bigquery.table import SchemaField
         from gcloud.bigquery.table import Table
         conn = _Connection({})
-        CLIENT = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._makeOne(self.DS_NAME, client=CLIENT)
+        client = _Client(project=self.PROJECT, connection=conn)
+        dataset = self._makeOne(self.DS_NAME, client=client)
         full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
         age = SchemaField('age', 'INTEGER', mode='REQUIRED')
         table = dataset.table('table_name', schema=[full_name, age])
diff --git a/gcloud/bigquery/test_table.py b/gcloud/bigquery/test_table.py
index 5c678ebe545f..b798d66a6572 100644
--- a/gcloud/bigquery/test_table.py
+++ b/gcloud/bigquery/test_table.py
@@ -74,7 +74,7 @@ def _getTargetClass(self):
     def _makeOne(self, *args, **kw):
         return self._getTargetClass()(*args, **kw)
 
-    def _makeResource(self):
+    def _setUpConstants(self):
         import datetime
         import pytz
         self.WHEN_TS = 1437767599.006
@@ -86,6 +86,9 @@ def _makeResource(self):
         self.RESOURCE_URL = 'http://example.com/path/to/resource'
         self.NUM_BYTES = 12345
         self.NUM_ROWS = 67
+
+    def _makeResource(self):
+        self._setUpConstants()
         return {
             'creationTime': self.WHEN_TS * 1000,
             'tableReference':
@@ -117,29 +120,58 @@ def _verifySchema(self, schema, resource):
         for field, r_field in zip(schema, r_fields):
             self._verify_field(field, r_field)
 
-    def _verifyResourceProperties(self, table, resource):
-        self.assertEqual(table.created, self.WHEN)
-        self.assertEqual(table.etag, self.ETAG)
-        self.assertEqual(table.num_rows, self.NUM_ROWS)
-        self.assertEqual(table.num_bytes, self.NUM_BYTES)
-        self.assertEqual(table.self_link, self.RESOURCE_URL)
+    def _verifyReadonlyResourceProperties(self, table, resource):
+        if 'creationTime' in resource:
+            self.assertEqual(table.created, self.WHEN)
+        else:
+            self.assertEqual(table.created, None)
+
+        if 'etag' in resource:
+            self.assertEqual(table.etag, self.ETAG)
+        else:
+            self.assertEqual(table.etag, None)
+
+        if 'numRows' in resource:
+            self.assertEqual(table.num_rows, self.NUM_ROWS)
+        else:
+            self.assertEqual(table.num_rows, None)
+
+        if 'numBytes' in resource:
+            self.assertEqual(table.num_bytes, self.NUM_BYTES)
+        else:
+            self.assertEqual(table.num_bytes, None)
+
+        if 'selfLink' in resource:
+            self.assertEqual(table.self_link, self.RESOURCE_URL)
+        else:
+            self.assertEqual(table.self_link, None)
+
         self.assertEqual(table.table_id, self.TABLE_ID)
         self.assertEqual(table.table_type,
                          'TABLE' if 'view' not in resource else 'VIEW')
 
+    def _verifyResourceProperties(self, table, resource):
+
+        self._verifyReadonlyResourceProperties(table, resource)
+
         if 'expirationTime' in resource:
             self.assertEqual(table.expires, self.EXP_TIME)
         else:
             self.assertEqual(table.expires, None)
+
         self.assertEqual(table.description, resource.get('description'))
         self.assertEqual(table.friendly_name, resource.get('friendlyName'))
         self.assertEqual(table.location, resource.get('location'))
+
         if 'view' in resource:
             self.assertEqual(table.view_query, resource['view']['query'])
         else:
             self.assertEqual(table.view_query, None)
-        self._verifySchema(table.schema, resource)
+        if 'schema' in resource:
+            self._verifySchema(table.schema, resource)
+        else:
+            self.assertEqual(table.schema, [])
 
     def test_ctor(self):
         client = _Client(self.PROJECT)
@@ -316,6 +348,43 @@ def test_view_query_deleter(self):
         del table.view_query
         self.assertEqual(table.view_query, None)
 
+    def test_from_api_repr_missing_identity(self):
+        self._setUpConstants()
+        client = _Client(self.PROJECT)
+        dataset = _Dataset(client)
+        RESOURCE = {}
+        klass = self._getTargetClass()
+        with self.assertRaises(KeyError):
+            klass.from_api_repr(RESOURCE, dataset)
+
+    def test_from_api_repr_bare(self):
+        self._setUpConstants()
+        client = _Client(self.PROJECT)
+        dataset = _Dataset(client)
+        RESOURCE = {
+            'id': '%s:%s:%s' % (self.PROJECT, self.DS_NAME, self.TABLE_NAME),
+            'tableReference': {
+                'projectId': self.PROJECT,
+                'datasetId': self.DS_NAME,
+                'tableId': self.TABLE_NAME,
+            },
+            'type': 'TABLE',
+        }
+        klass = self._getTargetClass()
+        table = klass.from_api_repr(RESOURCE, dataset)
+        self.assertEqual(table.name, self.TABLE_NAME)
+        self.assertTrue(table._dataset is dataset)
+        self._verifyResourceProperties(table, RESOURCE)
+
+    def test_from_api_repr_w_properties(self):
+        client = _Client(self.PROJECT)
+        dataset = _Dataset(client)
+        RESOURCE = self._makeResource()
+        klass = self._getTargetClass()
+        table = klass.from_api_repr(RESOURCE, dataset)
+        self.assertTrue(table._dataset._client is client)
+        self._verifyResourceProperties(table, RESOURCE)
+
     def test__parse_schema_resource_defaults(self):
         client = _Client(self.PROJECT)
         dataset = _Dataset(client)
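
For context, a minimal usage sketch of the two listing methods this patch
adds, wired together with the page-token protocol their docstrings describe.
The project name is a placeholder and credential discovery is assumed to
happen via the Client defaults; this is illustrative only, not part of the
patch itself:

    from gcloud.bigquery import Client

    client = Client(project='my-project')

    page_token = None
    while True:
        # Each call returns one page of datasets plus an opaque token;
        # a None token signals the last page.
        datasets, page_token = client.list_datasets(page_token=page_token)
        for dataset in datasets:
            # Tables are paged the same way; only the first page is
            # fetched here for brevity.
            tables, _ = dataset.list_tables()
            for table in tables:
                print('%s.%s' % (dataset.name, table.name))
        if page_token is None:
            break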