diff --git a/.circleci/config.yml b/.circleci/config.yml index 01242a41d15b..2352d187edc1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,14 +3,13 @@ version: 2 jobs: build: docker: - - image: googleapis/nox:0.11.2 + - image: googleapis/nox:0.17.0 steps: - checkout - run: name: Decrypt credentials command: | if [ -n "$GOOGLE_APPLICATION_CREDENTIALS" ]; then - apt-get update && apt-get install -y openssl openssl aes-256-cbc -d -a -k "$GOOGLE_CREDENTIALS_PASSPHRASE" \ -in /var/code/gcp/test_utils/credentials.json.enc \ -out "$GOOGLE_APPLICATION_CREDENTIALS" diff --git a/.gitignore b/.gitignore index df4fe06fa5ae..dbce921dd1eb 100644 --- a/.gitignore +++ b/.gitignore @@ -56,7 +56,8 @@ coverage.xml system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. -scripts/pylintrc_reduced +pylintrc +pylintrc.test # Directories used for creating generated PB2 files generated_python/ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a9f654a52c37..95a4dd13cfdb 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -402,7 +402,7 @@ instead of ``https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. -.. _description on PyPI: https://pypi.python.org/pypi/google-cloud +.. _description on PyPI: https://pypi.org/project/google-cloud/ ******************************************** Travis Configuration and Build Optimizations diff --git a/README.rst b/README.rst index b2fcb47df468..3de445aba762 100644 --- a/README.rst +++ b/README.rst @@ -20,6 +20,7 @@ The following client libraries have **GA** support: - `Google Cloud Datastore`_ (`Datastore README`_) - `Stackdriver Logging`_ (`Logging README`_) - `Google Cloud Storage`_ (`Storage README`_) +- `Google Cloud Translation`_ (`Translation README`_) **GA** (general availability) indicates that the client library for a particular service is stable, and that the code surface will not change in @@ -33,7 +34,7 @@ The following client libraries have **beta** support: - `Google BigQuery`_ (`BigQuery README`_) - `Google Cloud Vision`_ (`Vision README`_) - `Google Cloud Natural Language`_ (`Natural Language README`_) -- `Google Cloud Translation`_ (`Translation README`_) +- `Google Cloud Video Intelligence`_ (`Video Intelligence README`_) **Beta** indicates that the client library for a particular service is mostly stable and is being prepared for release. Issues and requests @@ -57,40 +58,42 @@ Cloud Platform services: still a work-in-progress and is more likely to get backwards-incompatible updates. See `versioning`_ for more details. -.. _Google Cloud Datastore: https://pypi.python.org/pypi/google-cloud-datastore +.. _Google Cloud Datastore: https://pypi.org/project/google-cloud-datastore/ .. _Datastore README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/datastore -.. _Google Cloud Storage: https://pypi.python.org/pypi/google-cloud-storage +.. _Google Cloud Storage: https://pypi.org/project/google-cloud-storage/ .. _Storage README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/storage -.. _Google Cloud Pub/Sub: https://pypi.python.org/pypi/google-cloud-pubsub +.. _Google Cloud Pub/Sub: https://pypi.org/project/google-cloud-pubsub/ .. _Pub/Sub README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/pubsub -.. 
_Google BigQuery: https://pypi.python.org/pypi/google-cloud-bigquery +.. _Google BigQuery: https://pypi.org/project/google-cloud-bigquery/ .. _BigQuery README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/bigquery -.. _Google Cloud Resource Manager: https://pypi.python.org/pypi/google-cloud-resource-manager +.. _Google Cloud Resource Manager: https://pypi.org/project/google-cloud-resource-manager/ .. _Resource Manager README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/resource_manager -.. _Stackdriver Logging: https://pypi.python.org/pypi/google-cloud-logging +.. _Stackdriver Logging: https://pypi.org/project/google-cloud-logging/ .. _Logging README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/logging -.. _Stackdriver Monitoring: https://pypi.python.org/pypi/google-cloud-monitoring +.. _Stackdriver Monitoring: https://pypi.org/project/google-cloud-monitoring/ .. _Monitoring README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/monitoring -.. _Google Cloud Bigtable: https://pypi.python.org/pypi/google-cloud-bigtable +.. _Google Cloud Bigtable: https://pypi.org/project/google-cloud-bigtable/ .. _Bigtable README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/bigtable -.. _Google Cloud DNS: https://pypi.python.org/pypi/google-cloud-dns +.. _Google Cloud DNS: https://pypi.org/project/google-cloud-dns/ .. _DNS README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/dns -.. _Stackdriver Error Reporting: https://pypi.python.org/pypi/google-cloud-error-reporting +.. _Stackdriver Error Reporting: https://pypi.org/project/google-cloud-error-reporting/ .. _Error Reporting README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/error_reporting -.. _Google Cloud Natural Language: https://pypi.python.org/pypi/google-cloud-language +.. _Google Cloud Natural Language: https://pypi.org/project/google-cloud-language/ .. _Natural Language README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/language -.. _Google Cloud Translation: https://pypi.python.org/pypi/google-cloud-translate +.. _Google Cloud Translation: https://pypi.org/project/google-cloud-translate/ .. _Translation README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/translate -.. _Google Cloud Speech: https://pypi.python.org/pypi/google-cloud-speech +.. _Google Cloud Speech: https://pypi.org/project/google-cloud-speech/ .. _Speech README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/speech -.. _Google Cloud Vision: https://pypi.python.org/pypi/google-cloud-vision +.. _Google Cloud Vision: https://pypi.org/project/google-cloud-vision/ .. _Vision README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/vision -.. _Google Cloud Bigtable - HappyBase: https://pypi.python.org/pypi/google-cloud-happybase/ +.. _Google Cloud Bigtable - HappyBase: https://pypi.org/project/google-cloud-happybase/ .. _HappyBase README: https://github.com/GoogleCloudPlatform/google-cloud-python-happybase .. _Google Cloud Runtime Configuration: https://cloud.google.com/deployment-manager/runtime-configurator/ .. 
_Runtime Config README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/runtimeconfig
-.. _Cloud Spanner: https://cloud.google.com/spanner/
+.. _Cloud Spanner: https://pypi.org/project/google-cloud-spanner/
 .. _Cloud Spanner README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/spanner
+.. _Google Cloud Video Intelligence: https://pypi.org/project/google-cloud-videointelligence/
+.. _Video Intelligence README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/videointelligence
 .. _versioning: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst#versioning
 
 If you need support for other Google APIs, check out the
@@ -159,6 +162,6 @@ Apache 2.0 - See `the LICENSE`_ for more information.
 .. |coverage| image:: https://coveralls.io/repos/GoogleCloudPlatform/google-cloud-python/badge.svg?branch=master
    :target: https://coveralls.io/r/GoogleCloudPlatform/google-cloud-python?branch=master
 .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud.svg
-   :target: https://pypi.python.org/pypi/google-cloud
+   :target: https://pypi.org/project/google-cloud/
 .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud.svg
-   :target: https://pypi.python.org/pypi/google-cloud
+   :target: https://pypi.org/project/google-cloud/
diff --git a/appveyor/requirements.txt b/appveyor/requirements.txt
index 24cc58840e72..45a4e4c25627 100644
--- a/appveyor/requirements.txt
+++ b/appveyor/requirements.txt
@@ -3,4 +3,4 @@
 # pip will build them from source using the MSVC compiler matching the
 # target Python version and architecture
 wheel
-nox-automation==0.11.2
+nox-automation>=0.17.0
diff --git a/bigquery/MANIFEST.in b/bigquery/MANIFEST.in
index 24aa72fb370b..1fbc0d0b321e 100644
--- a/bigquery/MANIFEST.in
+++ b/bigquery/MANIFEST.in
@@ -1,3 +1,3 @@
 include README.rst LICENSE
-recursive-include unit_tests *
+recursive-include tests *
 global-exclude *.pyc __pycache__
diff --git a/bigquery/README.rst b/bigquery/README.rst
index 1dcea16e0cc5..7e4f0cb72dae 100644
--- a/bigquery/README.rst
+++ b/bigquery/README.rst
@@ -9,7 +9,7 @@ Python Client for Google BigQuery
 
 - `Documentation`_
 
-.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery-usage.html
+.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery/usage.html
 
 Quick Start
 -----------
@@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn
 more. You may also find the `authentication document`_ shared by all the
 ``google-cloud-*`` libraries to be helpful.
 
-.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html
+.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html
 .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication
 
 Using the API
@@ -86,9 +86,9 @@ Perform a synchronous query
 See the ``google-cloud-python`` API `BigQuery documentation`_ to learn how
 to connect to BigQuery using this Client Library.
 
-.. _BigQuery documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery-usage.html
+.. _BigQuery documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery/usage.html
 
 ..
|pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg - :target: https://pypi.python.org/pypi/google-cloud-bigquery + :target: https://pypi.org/project/google-cloud-bigquery/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery.svg - :target: https://pypi.python.org/pypi/google-cloud-bigquery + :target: https://pypi.org/project/google-cloud-bigquery/ diff --git a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py index 7557111d100e..6641fbe01b42 100644 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ b/bigquery/google/cloud/bigquery/_helpers.py @@ -318,7 +318,7 @@ def _validate(self, value): :raises: ValueError if value is not allowed. """ if value not in self.ALLOWED: - raise ValueError('Pass one of: %s' ', '.join(self.ALLOWED)) + raise ValueError('Pass one of: %s' % ', '.join(self.ALLOWED)) class UDFResource(object): diff --git a/bigquery/google/cloud/bigquery/dbapi/__init__.py b/bigquery/google/cloud/bigquery/dbapi/__init__.py new file mode 100644 index 000000000000..4e9c9a810da4 --- /dev/null +++ b/bigquery/google/cloud/bigquery/dbapi/__init__.py @@ -0,0 +1,70 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google BigQuery implementation of the Database API Specification v2.0. + +This module implements the `Python Database API Specification v2.0 (DB-API)`_ +for Google BigQuery. + +.. _Python Database API Specification v2.0 (DB-API): + https://www.python.org/dev/peps/pep-0249/ + +.. warning:: + The ``dbapi`` module is **alpha**. The implementation is not complete. It + might be changed in backward-incompatible ways and is not subject to any SLA + or deprecation policy. 
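+
+A minimal usage sketch (assumes default application credentials are
+available in the environment)::
+
+    from google.cloud.bigquery import dbapi
+
+    connection = dbapi.connect()
+    cursor = connection.cursor()
+    cursor.execute('SELECT 1')
+    print(cursor.fetchall())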
+""" + +from google.cloud.bigquery.dbapi.connection import connect +from google.cloud.bigquery.dbapi.connection import Connection +from google.cloud.bigquery.dbapi.cursor import Cursor +from google.cloud.bigquery.dbapi.exceptions import Warning +from google.cloud.bigquery.dbapi.exceptions import Error +from google.cloud.bigquery.dbapi.exceptions import InterfaceError +from google.cloud.bigquery.dbapi.exceptions import DatabaseError +from google.cloud.bigquery.dbapi.exceptions import DataError +from google.cloud.bigquery.dbapi.exceptions import OperationalError +from google.cloud.bigquery.dbapi.exceptions import IntegrityError +from google.cloud.bigquery.dbapi.exceptions import InternalError +from google.cloud.bigquery.dbapi.exceptions import ProgrammingError +from google.cloud.bigquery.dbapi.exceptions import NotSupportedError +from google.cloud.bigquery.dbapi.types import Binary +from google.cloud.bigquery.dbapi.types import Date +from google.cloud.bigquery.dbapi.types import DateFromTicks +from google.cloud.bigquery.dbapi.types import Time +from google.cloud.bigquery.dbapi.types import TimeFromTicks +from google.cloud.bigquery.dbapi.types import Timestamp +from google.cloud.bigquery.dbapi.types import TimestampFromTicks +from google.cloud.bigquery.dbapi.types import BINARY +from google.cloud.bigquery.dbapi.types import DATETIME +from google.cloud.bigquery.dbapi.types import NUMBER +from google.cloud.bigquery.dbapi.types import ROWID +from google.cloud.bigquery.dbapi.types import STRING + + +apilevel = '2.0' + +# Threads may share the module, but not connections. +threadsafety = 1 + +paramstyle = 'pyformat' + +__all__ = [ + 'apilevel', 'threadsafety', 'paramstyle', 'connect', 'Connection', + 'Cursor', 'Warning', 'Error', 'InterfaceError', 'DatabaseError', + 'DataError', 'OperationalError', 'IntegrityError', 'InternalError', + 'ProgrammingError', 'NotSupportedError', 'Binary', 'Date', 'DateFromTicks', + 'Time', 'TimeFromTicks', 'Timestamp', 'TimestampFromTicks', 'BINARY', + 'DATETIME', 'NUMBER', 'ROWID', 'STRING', +] diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py new file mode 100644 index 000000000000..a9a358cbf0f5 --- /dev/null +++ b/bigquery/google/cloud/bigquery/dbapi/_helpers.py @@ -0,0 +1,108 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import datetime +import numbers + +import six + +from google.cloud import bigquery +from google.cloud.bigquery.dbapi import exceptions + + +def scalar_to_query_parameter(value, name=None): + """Convert a scalar value into a query parameter. + + :type value: any + :param value: A scalar value to convert into a query parameter. + + :type name: str + :param name: (Optional) Name of the query parameter. + + :rtype: :class:`~google.cloud.bigquery.ScalarQueryParameter` + :returns: + A query parameter corresponding with the type and value of the plain + Python object. 
+ :raises: :class:`~google.cloud.bigquery.dbapi.exceptions.ProgrammingError` + if the type cannot be determined. + """ + parameter_type = None + + if isinstance(value, bool): + parameter_type = 'BOOL' + elif isinstance(value, numbers.Integral): + parameter_type = 'INT64' + elif isinstance(value, numbers.Real): + parameter_type = 'FLOAT64' + elif isinstance(value, six.text_type): + parameter_type = 'STRING' + elif isinstance(value, six.binary_type): + parameter_type = 'BYTES' + elif isinstance(value, datetime.datetime): + parameter_type = 'DATETIME' if value.tzinfo is None else 'TIMESTAMP' + elif isinstance(value, datetime.date): + parameter_type = 'DATE' + elif isinstance(value, datetime.time): + parameter_type = 'TIME' + else: + raise exceptions.ProgrammingError( + 'encountered parameter {} with value {} of unexpected type'.format( + name, value)) + return bigquery.ScalarQueryParameter(name, parameter_type, value) + + +def to_query_parameters_list(parameters): + """Converts a sequence of parameter values into query parameters. + + :type parameters: Sequence[Any] + :param parameters: Sequence of query parameter values. + + :rtype: List[google.cloud.bigquery._helpers.AbstractQueryParameter] + :returns: A list of query parameters. + """ + return [scalar_to_query_parameter(value) for value in parameters] + + +def to_query_parameters_dict(parameters): + """Converts a dictionary of parameter values into query parameters. + + :type parameters: Mapping[str, Any] + :param parameters: Dictionary of query parameter values. + + :rtype: List[google.cloud.bigquery._helpers.AbstractQueryParameter] + :returns: A list of named query parameters. + """ + return [ + scalar_to_query_parameter(value, name=name) + for name, value + in six.iteritems(parameters)] + + +def to_query_parameters(parameters): + """Converts DB-API parameter values into query parameters. + + :type parameters: Mapping[str, Any] or Sequence[Any] + :param parameters: A dictionary or sequence of query parameter values. + + :rtype: List[google.cloud.bigquery._helpers.AbstractQueryParameter] + :returns: A list of query parameters. + """ + if parameters is None: + return [] + + if isinstance(parameters, collections.Mapping): + return to_query_parameters_dict(parameters) + + return to_query_parameters_list(parameters) diff --git a/bigquery/google/cloud/bigquery/dbapi/connection.py b/bigquery/google/cloud/bigquery/dbapi/connection.py new file mode 100644 index 000000000000..66aa0929b97e --- /dev/null +++ b/bigquery/google/cloud/bigquery/dbapi/connection.py @@ -0,0 +1,58 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Connection for the Google BigQuery DB-API.""" + +from google.cloud import bigquery +from google.cloud.bigquery.dbapi import cursor + + +class Connection(object): + """DB-API Connection to Google BigQuery. + + :type client: :class:`~google.cloud.bigquery.Client` + :param client: A client used to connect to BigQuery. 
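+
+    A minimal sketch of typical construction (prefer the :func:`connect`
+    factory over calling this constructor directly)::
+
+        from google.cloud.bigquery.dbapi import connect
+
+        connection = connect()  # creates a default bigquery.Client()
+        cursor = connection.cursor()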
+ """ + def __init__(self, client): + self._client = client + + def close(self): + """No-op.""" + + def commit(self): + """No-op.""" + + def cursor(self): + """Return a new cursor object. + + :rtype: :class:`~google.cloud.bigquery.dbapi.Cursor` + :returns: A DB-API cursor that uses this connection. + """ + return cursor.Cursor(self) + + +def connect(client=None): + """Construct a DB-API connection to Google BigQuery. + + :type client: :class:`~google.cloud.bigquery.Client` + :param client: + (Optional) A client used to connect to BigQuery. If not passed, a + client is created using default options inferred from the environment. + + :rtype: :class:`~google.cloud.bigquery.dbapi.Connection` + :returns: A new DB-API connection to BigQuery. + """ + if client is None: + client = bigquery.Client() + return Connection(client) diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py new file mode 100644 index 000000000000..7519c762ae1e --- /dev/null +++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py @@ -0,0 +1,329 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Cursor for the Google BigQuery DB-API.""" + +import collections +import uuid + +import six + +from google.cloud.bigquery.dbapi import _helpers +from google.cloud.bigquery.dbapi import exceptions +import google.cloud.exceptions + +# Per PEP 249: A 7-item sequence containing information describing one result +# column. The first two items (name and type_code) are mandatory, the other +# five are optional and are set to None if no meaningful values can be +# provided. +Column = collections.namedtuple( + 'Column', + [ + 'name', 'type_code', 'display_size', 'internal_size', 'precision', + 'scale', 'null_ok', + ]) + + +class Cursor(object): + """DB-API Cursor to Google BigQuery. + + :type connection: :class:`~google.cloud.bigquery.dbapi.Connection` + :param connection: A DB-API connection to Google BigQuery. + """ + def __init__(self, connection): + self.connection = connection + self.description = None + # Per PEP 249: The attribute is -1 in case no .execute*() has been + # performed on the cursor or the rowcount of the last operation + # cannot be determined by the interface. + self.rowcount = -1 + # Per PEP 249: The arraysize attribute defaults to 1, meaning to fetch + # a single row at a time. + self.arraysize = 1 + self._query_data = None + self._page_token = None + self._has_fetched_all_rows = True + + def close(self): + """No-op.""" + + def _set_description(self, schema): + """Set description from schema. + + :type schema: Sequence[google.cloud.bigquery.schema.SchemaField] + :param schema: A description of fields in the schema. 
+        """
+        if schema is None:
+            self.description = None
+            return
+
+        self.description = tuple([
+            Column(
+                name=field.name,
+                type_code=field.field_type,
+                display_size=None,
+                internal_size=None,
+                precision=None,
+                scale=None,
+                null_ok=field.is_nullable)
+            for field in schema])
+
+    def _set_rowcount(self, query_results):
+        """Set the rowcount from query results.
+
+        Normally, this sets rowcount to the number of rows returned by the
+        query, but if it was a DML statement, it sets rowcount to the number
+        of modified rows.
+
+        :type query_results:
+            :class:`~google.cloud.bigquery.query.QueryResults`
+        :param query_results: results of a query
+        """
+        total_rows = 0
+        num_dml_affected_rows = query_results.num_dml_affected_rows
+
+        if (query_results.total_rows is not None
+                and query_results.total_rows > 0):
+            total_rows = query_results.total_rows
+        if num_dml_affected_rows is not None and num_dml_affected_rows > 0:
+            total_rows = num_dml_affected_rows
+        self.rowcount = total_rows
+
+    def execute(self, operation, parameters=None):
+        """Prepare and execute a database operation.
+
+        .. note::
+            When setting query parameters, values which are "text"
+            (``unicode`` in Python 2, ``str`` in Python 3) will use
+            the 'STRING' BigQuery type. Values which are "bytes" (``str`` in
+            Python 2, ``bytes`` in Python 3) will use the 'BYTES' type.
+
+            A `~datetime.datetime` parameter without timezone information uses
+            the 'DATETIME' BigQuery type (example: Global Pi Day Celebration
+            March 14, 2017 at 1:59pm). A `~datetime.datetime` parameter with
+            timezone information uses the 'TIMESTAMP' BigQuery type (example:
+            a wedding on April 29, 2011 at 11am, British Summer Time).
+
+            For more information about BigQuery data types, see:
+            https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
+
+            ``STRUCT``/``RECORD`` and ``REPEATED`` query parameters are not
+            yet supported. See:
+            https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524
+
+        :type operation: str
+        :param operation: A Google BigQuery query string.
+
+        :type parameters: Mapping[str, Any] or Sequence[Any]
+        :param parameters:
+            (Optional) dictionary or sequence of parameter values.
+        """
+        self._query_data = None
+        self._page_token = None
+        self._has_fetched_all_rows = False
+        client = self.connection._client
+        job_id = str(uuid.uuid4())
+
+        # The DB-API uses the pyformat formatting, since the way BigQuery does
+        # query parameters was not one of the standard options. Convert both
+        # the query and the parameters to the format expected by the client
+        # libraries.
+        formatted_operation = _format_operation(
+            operation, parameters=parameters)
+        query_parameters = _helpers.to_query_parameters(parameters)
+
+        query_job = client.run_async_query(
+            job_id,
+            formatted_operation,
+            query_parameters=query_parameters)
+        query_job.use_legacy_sql = False
+
+        try:
+            query_results = query_job.result()
+        except google.cloud.exceptions.GoogleCloudError:
+            raise exceptions.DatabaseError(query_job.errors)
+
+        # Force the iterator to run because the query_results doesn't
+        # have the total_rows populated. See:
+        # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3506
+        query_iterator = query_results.fetch_data()
+        try:
+            six.next(iter(query_iterator))
+        except StopIteration:
+            pass
+
+        self._query_data = iter(
+            query_results.fetch_data(max_results=self.arraysize))
+        self._set_rowcount(query_results)
+        self._set_description(query_results.schema)
+
+    def executemany(self, operation, seq_of_parameters):
+        """Prepare and execute a database operation multiple times.
+
+        :type operation: str
+        :param operation: A Google BigQuery query string.
+
+        :type seq_of_parameters: Sequence[Mapping[str, Any] or Sequence[Any]]
+        :param seq_of_parameters: Sequence of many sets of parameter values.
+        """
+        for parameters in seq_of_parameters:
+            self.execute(operation, parameters)
+
+    def fetchone(self):
+        """Fetch a single row from the results of the last ``execute*()`` call.
+
+        :rtype: tuple
+        :returns:
+            A tuple representing a row or ``None`` if no more data is
+            available.
+        :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
+            if called before ``execute()``.
+        """
+        if self._query_data is None:
+            raise exceptions.InterfaceError(
+                'No query results: execute() must be called before fetch.')
+
+        try:
+            return six.next(self._query_data)
+        except StopIteration:
+            return None
+
+    def fetchmany(self, size=None):
+        """Fetch multiple results from the last ``execute*()`` call.
+
+        .. note::
+            The size parameter is not used for the request/response size.
+            Set the ``arraysize`` attribute before calling ``execute()`` to
+            set the batch size.
+
+        :type size: int
+        :param size:
+            (Optional) Maximum number of rows to return. Defaults to the
+            ``arraysize`` property value.
+
+        :rtype: List[tuple]
+        :returns: A list of rows.
+        :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
+            if called before ``execute()``.
+        """
+        if self._query_data is None:
+            raise exceptions.InterfaceError(
+                'No query results: execute() must be called before fetch.')
+        if size is None:
+            size = self.arraysize
+
+        rows = []
+        for row in self._query_data:
+            rows.append(row)
+            if len(rows) >= size:
+                break
+        return rows
+
+    def fetchall(self):
+        """Fetch all remaining results from the last ``execute*()`` call.
+
+        :rtype: List[tuple]
+        :returns: A list of all the rows in the results.
+        :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
+            if called before ``execute()``.
+        """
+        if self._query_data is None:
+            raise exceptions.InterfaceError(
+                'No query results: execute() must be called before fetch.')
+        return [row for row in self._query_data]
+
+    def setinputsizes(self, sizes):
+        """No-op."""
+
+    def setoutputsize(self, size, column=None):
+        """No-op."""
+
+
+def _format_operation_list(operation, parameters):
+    """Formats parameters in operation in the way BigQuery expects.
+
+    The input operation will be a query like ``SELECT %s`` and the output
+    will be a query like ``SELECT ?``.
+
+    :type operation: str
+    :param operation: A Google BigQuery query string.
+
+    :type parameters: Sequence[Any]
+    :param parameters: Sequence of parameter values.
+
+    :rtype: str
+    :returns: A formatted query string.
+    :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
+        if a parameter used in the operation is not found in the
+        ``parameters`` argument.
+    """
+    formatted_params = ['?' for _ in parameters]
+
+    try:
+        return operation % tuple(formatted_params)
+    except TypeError as exc:
+        raise exceptions.ProgrammingError(exc)
+
+
+def _format_operation_dict(operation, parameters):
+    """Formats parameters in operation in the way BigQuery expects.
+
+    The input operation will be a query like ``SELECT %(namedparam)s`` and
+    the output will be a query like ``SELECT @namedparam``.
+
+    :type operation: str
+    :param operation: A Google BigQuery query string.
+
+    :type parameters: Mapping[str, Any]
+    :param parameters: Dictionary of parameter values.
+
+    :rtype: str
+    :returns: A formatted query string.
+    :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
+        if a parameter used in the operation is not found in the
+        ``parameters`` argument.
+    """
+    formatted_params = {}
+    for name in parameters:
+        escaped_name = name.replace('`', r'\`')
+        formatted_params[name] = '@`{}`'.format(escaped_name)
+
+    try:
+        return operation % formatted_params
+    except KeyError as exc:
+        raise exceptions.ProgrammingError(exc)
+
+
+def _format_operation(operation, parameters=None):
+    """Formats parameters in operation in the way BigQuery expects.
+
+    :type operation: str
+    :param operation: A Google BigQuery query string.
+
+    :type parameters: Mapping[str, Any] or Sequence[Any]
+    :param parameters: Optional parameter values.
+
+    :rtype: str
+    :returns: A formatted query string.
+    :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
+        if a parameter used in the operation is not found in the
+        ``parameters`` argument.
+    """
+    if parameters is None:
+        return operation
+
+    if isinstance(parameters, collections.Mapping):
+        return _format_operation_dict(operation, parameters)
+
+    return _format_operation_list(operation, parameters)
diff --git a/bigquery/google/cloud/bigquery/dbapi/exceptions.py b/bigquery/google/cloud/bigquery/dbapi/exceptions.py
new file mode 100644
index 000000000000..77494e5ff1e1
--- /dev/null
+++ b/bigquery/google/cloud/bigquery/dbapi/exceptions.py
@@ -0,0 +1,58 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions used in the Google BigQuery DB-API."""
+
+
+class Warning(Exception):
+    """Exception raised for important DB-API warnings."""
+
+
+class Error(Exception):
+    """Exception representing all non-warning DB-API errors."""
+
+
+class InterfaceError(Error):
+    """DB-API error related to the database interface."""
+
+
+class DatabaseError(Error):
+    """DB-API error related to the database."""
+
+
+class DataError(DatabaseError):
+    """DB-API error due to problems with the processed data."""
+
+
+class OperationalError(DatabaseError):
+    """DB-API error related to the database operation.
+
+    These errors are not necessarily under the control of the programmer.
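+    For example, an unexpected disconnect while a request is being
+    processed falls into this category.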
+    """
+
+
+class IntegrityError(DatabaseError):
+    """DB-API error when integrity of the database is affected."""
+
+
+class InternalError(DatabaseError):
+    """DB-API error when the database encounters an internal error."""
+
+
+class ProgrammingError(DatabaseError):
+    """DB-API exception raised for programming errors."""
+
+
+class NotSupportedError(DatabaseError):
+    """DB-API error for operations not supported by the database or API."""
diff --git a/bigquery/google/cloud/bigquery/dbapi/types.py b/bigquery/google/cloud/bigquery/dbapi/types.py
new file mode 100644
index 000000000000..2d06f260e360
--- /dev/null
+++ b/bigquery/google/cloud/bigquery/dbapi/types.py
@@ -0,0 +1,84 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Types used in the Google BigQuery DB-API.
+
+See `PEP-249`_ for details.
+
+.. _PEP-249:
+    https://www.python.org/dev/peps/pep-0249/#type-objects-and-constructors
+"""
+
+import datetime
+
+
+Date = datetime.date
+Time = datetime.time
+Timestamp = datetime.datetime
+DateFromTicks = datetime.date.fromtimestamp
+TimestampFromTicks = datetime.datetime.fromtimestamp
+
+
+def Binary(string):
+    """Construct a DB-API binary value.
+
+    :type string: str
+    :param string: A string to encode as a binary value.
+
+    :rtype: bytes
+    :returns: The UTF-8 encoded bytes representing the string.
+    """
+    return string.encode('utf-8')
+
+
+def TimeFromTicks(ticks, tz=None):
+    """Construct a DB-API time value from the given ticks value.
+
+    :type ticks: float
+    :param ticks:
+        a number of seconds since the epoch; see the documentation of the
+        standard Python time module for details.
+
+    :type tz: :class:`datetime.tzinfo`
+    :param tz: (Optional) time zone to use for conversion
+
+    :rtype: :class:`datetime.time`
+    :returns: time represented by ticks.
+    """
+    dt = datetime.datetime.fromtimestamp(ticks, tz=tz)
+    return dt.timetz()
+
+
+class _DBAPITypeObject(object):
+    """DB-API type object which compares equal to many different strings.
+
+    See `PEP-249`_ for details.
+
+    .. _PEP-249:
+        https://www.python.org/dev/peps/pep-0249/#implementation-hints-for-module-authors
+    """
+
+    def __init__(self, *values):
+        self.values = values
+
+    def __eq__(self, other):
+        return other in self.values
+
+
+STRING = 'STRING'
+BINARY = _DBAPITypeObject('BYTES', 'RECORD', 'STRUCT')
+NUMBER = _DBAPITypeObject(
+    'INTEGER', 'INT64', 'FLOAT', 'FLOAT64', 'BOOLEAN', 'BOOL')
+DATETIME = _DBAPITypeObject('TIMESTAMP', 'DATE', 'TIME', 'DATETIME')
+ROWID = 'ROWID'
diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index 4f791bdbea0c..ef5353f9ff14 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -14,8 +14,13 @@
 
 """Define API Jobs."""
 
+import collections
+import threading
+
 import six
+from six.moves import http_client
 
+from google.cloud import exceptions
 from google.cloud.exceptions import NotFound
 from google.cloud._helpers import _datetime_from_microseconds
 from google.cloud.bigquery.dataset import Dataset
@@ -27,6 +32,60 @@
 from google.cloud.bigquery._helpers import UDFResourcesProperty
 from google.cloud.bigquery._helpers import _EnumProperty
 from google.cloud.bigquery._helpers import _TypedProperty
+import google.cloud.future.polling
+
+_DONE_STATE = 'DONE'
+_STOPPED_REASON = 'stopped'
+
+_ERROR_REASON_TO_EXCEPTION = {
+    'accessDenied': http_client.FORBIDDEN,
+    'backendError': http_client.INTERNAL_SERVER_ERROR,
+    'billingNotEnabled': http_client.FORBIDDEN,
+    'billingTierLimitExceeded': http_client.BAD_REQUEST,
+    'blocked': http_client.FORBIDDEN,
+    'duplicate': http_client.CONFLICT,
+    'internalError': http_client.INTERNAL_SERVER_ERROR,
+    'invalid': http_client.BAD_REQUEST,
+    'invalidQuery': http_client.BAD_REQUEST,
+    'notFound': http_client.NOT_FOUND,
+    'notImplemented': http_client.NOT_IMPLEMENTED,
+    'quotaExceeded': http_client.FORBIDDEN,
+    'rateLimitExceeded': http_client.FORBIDDEN,
+    'resourceInUse': http_client.BAD_REQUEST,
+    'resourcesExceeded': http_client.BAD_REQUEST,
+    'responseTooLarge': http_client.FORBIDDEN,
+    'stopped': http_client.OK,
+    'tableUnavailable': http_client.BAD_REQUEST,
+}
+
+_FakeResponse = collections.namedtuple('_FakeResponse', ['status'])
+
+
+def _error_result_to_exception(error_result):
+    """Maps BigQuery error reasons to an exception.
+
+    The reasons and their matching HTTP status codes are documented on
+    the `troubleshooting errors`_ page.
+
+    .. _troubleshooting errors: https://cloud.google.com/bigquery\
+        /troubleshooting-errors
+
+    :type error_result: Mapping[str, str]
+    :param error_result: The error result from BigQuery.
+
+    :rtype: :class:`google.cloud.exceptions.GoogleCloudError`
+    :returns: The mapped exception.
+    """
+    reason = error_result.get('reason')
+    status_code = _ERROR_REASON_TO_EXCEPTION.get(
+        reason, http_client.INTERNAL_SERVER_ERROR)
+    # make_exception expects an httplib2 response object.
+    fake_response = _FakeResponse(status=status_code)
+    return exceptions.make_exception(
+        fake_response,
+        error_result.get('message', ''),
+        error_info=error_result,
+        use_json=False)
 
 
 class Compression(_EnumProperty):
@@ -82,16 +141,23 @@ class WriteDisposition(_EnumProperty):
     ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY)
 
 
-class _BaseJob(object):
-    """Base class for jobs.
+class _AsyncJob(google.cloud.future.polling.PollingFuture):
+    """Base class for asynchronous jobs.
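+
+    Extends :class:`~google.cloud.future.polling.PollingFuture`, so a job
+    can also be waited on like a future (see :meth:`result`).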
+
+    :type name: str
+    :param name: the name of the job
 
     :type client: :class:`google.cloud.bigquery.client.Client`
     :param client: A client which holds credentials and project configuration
                    for the dataset (which requires a project).
     """
-    def __init__(self, client):
+    def __init__(self, name, client):
+        super(_AsyncJob, self).__init__()
+        self.name = name
         self._client = client
         self._properties = {}
+        self._result_set = False
+        self._completion_lock = threading.Lock()
 
     @property
     def project(self):
@@ -117,21 +183,6 @@ def _require_client(self, client):
             client = self._client
         return client
 
-
-class _AsyncJob(_BaseJob):
-    """Base class for asynchronous jobs.
-
-    :type name: str
-    :param name: the name of the job
-
-    :type client: :class:`google.cloud.bigquery.client.Client`
-    :param client: A client which holds credentials and project configuration
-                   for the dataset (which requires a project).
-    """
-    def __init__(self, name, client):
-        super(_AsyncJob, self).__init__(client)
-        self.name = name
-
     @property
     def job_type(self):
         """Type of job
@@ -273,6 +324,9 @@ def _set_properties(self, api_response):
         self._properties.clear()
         self._properties.update(cleaned)
 
+        # For Future interface
+        self._set_future_result()
+
     @classmethod
     def _get_resource_config(cls, resource):
         """Helper for :meth:`from_api_repr`
@@ -345,7 +399,7 @@ def exists(self, client=None):
         return True
 
     def reload(self, client=None):
-        """API call: refresh job properties via a GET request
+        """API call: refresh job properties via a GET request.
 
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get
@@ -371,12 +425,85 @@ def cancel(self, client=None):
             ``NoneType``
         :param client: the client to use.  If not passed, falls back to the
                        ``client`` stored on the current dataset.
+
+        :rtype: bool
+        :returns: Boolean indicating that the cancel request was sent.
         """
         client = self._require_client(client)
 
         api_response = client._connection.api_request(
             method='POST', path='%s/cancel' % (self.path,))
         self._set_properties(api_response['job'])
+        # The Future interface requires that we return True if the *attempt*
+        # to cancel was successful.
+        return True
+
+    # The following methods implement the PollingFuture interface. Note that
+    # the methods above are from the pre-Future interface and are left for
+    # compatibility. The only "overloaded" method is :meth:`cancel`, which
+    # satisfies both interfaces.
+
+    def _set_future_result(self):
+        """Set the result or exception from the job if it is complete."""
+        # This must be done in a lock to prevent the polling thread
+        # and main thread from both executing the completion logic
+        # at the same time.
+        with self._completion_lock:
+            # If the operation isn't complete or if the result has already been
+            # set, do not call set_result/set_exception again.
+            # Note: self._result_set is set to True in set_result and
+            # set_exception, in case those methods are invoked directly.
+            if self.state != _DONE_STATE or self._result_set:
+                return
+
+            if self.error_result is not None:
+                exception = _error_result_to_exception(self.error_result)
+                self.set_exception(exception)
+            else:
+                self.set_result(self)
+
+    def done(self):
+        """Refresh the job and check if it is complete.
+
+        :rtype: bool
+        :returns: True if the job is complete, False otherwise.
+        """
+        # Do not refresh if the state is already done, as the job will not
+        # change once complete.
+        if self.state != _DONE_STATE:
+            self.reload()
+        return self.state == _DONE_STATE
+
+    def result(self, timeout=None):
+        """Start the job and wait for it to complete and get the result.
+
+        :type timeout: int
+        :param timeout: How long to wait for the job to complete before
+            raising a :class:`TimeoutError`.
+
+        :rtype: _AsyncJob
+        :returns: This instance.
+
+        :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the job
+            failed or :class:`TimeoutError` if the job did not complete in the
+            given timeout.
+        """
+        if self.state is None:
+            self.begin()
+        return super(_AsyncJob, self).result(timeout=timeout)
+
+    def cancelled(self):
+        """Check if the job has been cancelled.
+
+        The API does not expose a direct "cancelled" flag, so this reports
+        True only when the job's error result records the 'stopped' reason.
+        This method is here to satisfy the interface for
+        :class:`google.cloud.future.Future`.
+
+        :rtype: bool
+        :returns: True if the job has an error result with the 'stopped'
+            reason, False otherwise.
+        """
+        return (self.error_result is not None
+                and self.error_result.get('reason') == _STOPPED_REASON)
 
 
 class _LoadConfiguration(object):
@@ -1127,7 +1254,7 @@ def from_api_repr(cls, resource, client):
         job._set_properties(resource)
         return job
 
-    def results(self):
+    def query_results(self):
         """Construct a QueryResults instance, bound to this job.
 
         :rtype: :class:`~google.cloud.bigquery.query.QueryResults`
@@ -1135,3 +1262,21 @@ def results(self):
         from google.cloud.bigquery.query import QueryResults
         return QueryResults.from_query_job(self)
+
+    def result(self, timeout=None):
+        """Start the job and wait for it to complete and get the result.
+
+        :type timeout: int
+        :param timeout: How long to wait for the job to complete before
+            raising a :class:`TimeoutError`.
+
+        :rtype: :class:`~google.cloud.bigquery.query.QueryResults`
+        :returns: The query results.
+
+        :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the job
+            failed or :class:`TimeoutError` if the job did not complete in the
+            given timeout.
+        """
+        super(QueryJob, self).result(timeout=timeout)
+        # Return a QueryResults instance instead of returning the job.
+        return self.query_results()
diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py
index 6db2742bbe01..d596deadfb40 100644
--- a/bigquery/google/cloud/bigquery/query.py
+++ b/bigquery/google/cloud/bigquery/query.py
@@ -414,13 +414,12 @@ def fetch_data(self, max_results=None, page_token=None, start_index=None,
         :param client: the client to use.  If not passed, falls back to the
                        ``client`` stored on the current dataset.
 
-        :rtype: tuple
-        :returns: ``(row_data, total_rows, page_token)``, where ``row_data``
-                  is a list of tuples, one per result row, containing only
-                  the values;  ``total_rows`` is a count of the total number
-                  of rows in the table;  and ``page_token`` is an opaque
-                  string which can be used to fetch the next batch of rows
-                  (``None`` if no further batches can be fetched).
+        :rtype: :class:`~google.cloud.iterator.Iterator`
+        :returns: Iterator of row data :class:`tuple`s. During each page, the
+                  iterator will have the ``total_rows`` attribute set, which
+                  counts the total number of rows **in the result set**
+                  (this is distinct from the total number of rows in the
+                  current page: ``iterator.page.num_items``).
         :raises: ValueError if the query has not yet been executed.
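+
+        A minimal sketch of iterating the results (assumes the query has
+        already been executed, e.g. via :meth:`run`)::
+
+            for row in query.fetch_data():
+                print(row)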
""" if self.name is None: diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index 6d4a437a809f..edd8dd68f3bd 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -26,27 +26,94 @@ class SchemaField(object): 'FLOAT', 'BOOLEAN', 'TIMESTAMP' or 'RECORD'). :type mode: str - :param mode: the type of the field (one of 'NULLABLE', 'REQUIRED', + :param mode: the mode of the field (one of 'NULLABLE', 'REQUIRED', or 'REPEATED'). :type description: str :param description: optional description for the field. - :type fields: list of :class:`SchemaField`, or None + :type fields: tuple of :class:`SchemaField` :param fields: subfields (requires ``field_type`` of 'RECORD'). """ - def __init__(self, name, field_type, mode='NULLABLE', description=None, - fields=None): - self.name = name - self.field_type = field_type - self.mode = mode - self.description = description - self.fields = fields + def __init__(self, name, field_type, mode='NULLABLE', + description=None, fields=()): + self._name = name + self._field_type = field_type + self._mode = mode + self._description = description + self._fields = tuple(fields) - def __eq__(self, other): + @property + def name(self): + """str: The name of the field.""" + return self._name + + @property + def field_type(self): + """str: The type of the field. + + Will be one of 'STRING', 'INTEGER', 'FLOAT', 'BOOLEAN', + 'TIMESTAMP' or 'RECORD'. + """ + return self._field_type + + @property + def mode(self): + """str: The mode of the field. + + Will be one of 'NULLABLE', 'REQUIRED', or 'REPEATED'. + """ + return self._mode + + @property + def is_nullable(self): + """Check whether 'mode' is 'nullable'.""" + return self._mode == 'NULLABLE' + + @property + def description(self): + """Optional[str]: Description for the field.""" + return self._description + + @property + def fields(self): + """tuple: Subfields contained in this field. + + If ``field_type`` is not 'RECORD', this property must be + empty / unset. + """ + return self._fields + + def _key(self): + """A tuple key that unique-ly describes this field. + + Used to compute this instance's hashcode and evaluate equality. + + Returns: + tuple: The contents of this :class:`SchemaField`. 
+ """ return ( - self.name == other.name and - self.field_type.lower() == other.field_type.lower() and - self.mode == other.mode and - self.description == other.description and - self.fields == other.fields) + self._name, + self._field_type.lower(), + self._mode, + self._description, + self._fields, + ) + + def __eq__(self, other): + if isinstance(other, SchemaField): + return self._key() == other._key() + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, SchemaField): + return self._key() != other._key() + else: + return NotImplemented + + def __hash__(self): + return hash(self._key()) + + def __repr__(self): + return 'SchemaField{}'.format(self._key()) diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 662cc670d541..f7752bb8fc36 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -15,22 +15,21 @@ """Define API Datasets.""" import datetime -import json import os import httplib2 import six +import google.auth.transport.requests +from google import resumable_media +from google.resumable_media.requests import MultipartUpload +from google.resumable_media.requests import ResumableUpload + from google.cloud._helpers import _datetime_from_microseconds from google.cloud._helpers import _millis_from_datetime from google.cloud.exceptions import NotFound from google.cloud.exceptions import make_exception from google.cloud.iterator import HTTPIterator -from google.cloud.streaming.exceptions import HttpError -from google.cloud.streaming.http_wrapper import Request -from google.cloud.streaming.http_wrapper import make_api_request -from google.cloud.streaming.transfer import RESUMABLE_UPLOAD -from google.cloud.streaming.transfer import Upload from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery._helpers import _item_to_row from google.cloud.bigquery._helpers import _rows_page_start @@ -39,6 +38,17 @@ _TABLE_HAS_NO_SCHEMA = "Table has no schema: call 'table.reload()'" _MARKER = object() +_DEFAULT_CHUNKSIZE = 1048576 # 1024 * 1024 B = 1 MB +_BASE_UPLOAD_TEMPLATE = ( + u'https://www.googleapis.com/upload/bigquery/v2/projects/' + u'{project}/jobs?uploadType=') +_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'multipart' +_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'resumable' +_GENERIC_CONTENT_TYPE = u'*/*' +_READ_LESS_THAN_SIZE = ( + 'Size {:d} was specified but the file-like object only had ' + '{:d} bytes remaining.') +_DEFAULT_NUM_RETRIES = 6 class Table(object): @@ -193,7 +203,7 @@ def table_id(self): def table_type(self): """The type of the table. - Possible values are "TABLE" or "VIEW". + Possible values are "TABLE", "VIEW", or "EXTERNAL". :rtype: str, or ``NoneType`` :returns: the URL (None until set from the server). @@ -364,13 +374,49 @@ def view_query(self, value): """ if not isinstance(value, six.string_types): raise ValueError("Pass a string") - self._properties['view'] = {'query': value} + if self._properties.get('view') is None: + self._properties['view'] = {} + self._properties['view']['query'] = value @view_query.deleter def view_query(self): """Delete SQL query defining the table as a view.""" self._properties.pop('view', None) + @property + def view_use_legacy_sql(self): + """Specifies whether to execute the view with legacy or standard SQL. + + If not set, None is returned. BigQuery's default mode is equivalent to + useLegacySql = True. 
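+
+        A sketch of defining a standard-SQL view (the table and query names
+        are illustrative)::
+
+            view = dataset.table('my_view')
+            view.view_query = 'SELECT name FROM `my-project.my_dataset.people`'
+            view.view_use_legacy_sql = False
+            view.create()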
+
+        :rtype: bool, or ``NoneType``
+        :returns: The boolean for view.useLegacySql as set by the user, or
+                  None (the default).
+        """
+        view = self._properties.get('view')
+        if view is not None:
+            return view.get('useLegacySql')
+
+    @view_use_legacy_sql.setter
+    def view_use_legacy_sql(self, value):
+        """Update the view sub-property 'useLegacySql'.
+
+        This boolean specifies whether to execute the view with legacy SQL
+        (True) or standard SQL (False). The default, if not specified, is
+        True.
+
+        :type value: bool
+        :param value: The boolean for view.useLegacySql
+
+        :raises: ValueError for invalid value types.
+        """
+        if not isinstance(value, bool):
+            raise ValueError("Pass a boolean")
+        if self._properties.get('view') is None:
+            self._properties['view'] = {}
+        self._properties['view']['useLegacySql'] = value
+
     def list_partitions(self, client=None):
         """List the partitions in a table.
 
@@ -470,6 +516,8 @@ def _build_resource(self):
         if self.view_query is not None:
             view = resource['view'] = {}
             view['query'] = self.view_query
+            if self.view_use_legacy_sql is not None:
+                view['useLegacySql'] = self.view_use_legacy_sql
 
         if self._schema:
             resource['schema'] = {
@@ -479,7 +527,7 @@ def _build_resource(self):
         return resource
 
     def create(self, client=None):
-        """API call: create the dataset via a PUT request
+        """API call: create the table via a PUT request
 
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert
@@ -777,15 +825,177 @@ def insert_data(self,
 
         return errors
 
-    @staticmethod
-    def _check_response_error(request, http_response):
-        """Helper for :meth:`upload_from_file`."""
-        info = http_response.info
-        status = int(info['status'])
-        if not 200 <= status < 300:
-            faux_response = httplib2.Response({'status': status})
-            raise make_exception(faux_response, http_response.content,
-                                 error_info=request.url)
+    def _make_transport(self, client):
+        """Make an authenticated transport with a client's credentials.
+
+        :type client: :class:`~google.cloud.bigquery.client.Client`
+        :param client: The client to use.
+
+        :rtype: :class:`~google.auth.transport.requests.AuthorizedSession`
+        :returns: The transport (with credentials) that will
+                  make authenticated requests.
+        """
+        # Create a ``requests`` transport with the client's credentials.
+        transport = google.auth.transport.requests.AuthorizedSession(
+            client._credentials)
+        return transport
+
+    def _initiate_resumable_upload(self, client, stream,
+                                   metadata, num_retries):
+        """Initiate a resumable upload.
+
+        :type client: :class:`~google.cloud.bigquery.client.Client`
+        :param client: The client to use.
+
+        :type stream: IO[bytes]
+        :param stream: A bytes IO object open for reading.
+
+        :type metadata: dict
+        :param metadata: The metadata associated with the upload.
+
+        :type num_retries: int
+        :param num_retries: Number of upload retries. (Deprecated: This
+                            argument will be removed in a future release.)
+
+        :rtype: tuple
+        :returns:
+            Pair of
+
+            * The :class:`~google.resumable_media.requests.ResumableUpload`
+              that was created
+            * The ``transport`` used to initiate the upload.
+ """ + chunk_size = _DEFAULT_CHUNKSIZE + transport = self._make_transport(client) + headers = _get_upload_headers(client._connection.USER_AGENT) + upload_url = _RESUMABLE_URL_TEMPLATE.format(project=self.project) + upload = ResumableUpload(upload_url, chunk_size, headers=headers) + + if num_retries is not None: + upload._retry_strategy = resumable_media.RetryStrategy( + max_retries=num_retries) + + upload.initiate( + transport, stream, metadata, _GENERIC_CONTENT_TYPE, + stream_final=False) + + return upload, transport + + def _do_resumable_upload(self, client, stream, metadata, num_retries): + """Perform a resumable upload. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload. + + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: :class:`~requests.Response` + :returns: The "200 OK" response object returned after the final chunk + is uploaded. + """ + upload, transport = self._initiate_resumable_upload( + client, stream, metadata, num_retries) + + while not upload.finished: + response = upload.transmit_next_chunk(transport) + + return response + + def _do_multipart_upload(self, client, stream, metadata, + size, num_retries): + """Perform a multipart upload. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload. + + :type size: int + :param size: The number of bytes to be uploaded (which will be read + from ``stream``). If not provided, the upload will be + concluded once ``stream`` is exhausted (or :data:`None`). + + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: :class:`~requests.Response` + :returns: The "200 OK" response object returned after the multipart + upload request. + :raises: :exc:`ValueError` if the ``stream`` has fewer than ``size`` + bytes remaining. + """ + data = stream.read(size) + if len(data) < size: + msg = _READ_LESS_THAN_SIZE.format(size, len(data)) + raise ValueError(msg) + + transport = self._make_transport(client) + headers = _get_upload_headers(client._connection.USER_AGENT) + + upload_url = _MULTIPART_URL_TEMPLATE.format(project=self.project) + upload = MultipartUpload(upload_url, headers=headers) + + if num_retries is not None: + upload._retry_strategy = resumable_media.RetryStrategy( + max_retries=num_retries) + + response = upload.transmit( + transport, data, metadata, _GENERIC_CONTENT_TYPE) + + return response + + def _do_upload(self, client, stream, metadata, size, num_retries): + """Determine an upload strategy and then perform the upload. + + If ``size`` is :data:`None`, then a resumable upload will be used, + otherwise the content and the metadata will be uploaded + in a single multipart upload request. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload. 
+ + :type size: int + :param size: The number of bytes to be uploaded (which will be read + from ``stream``). If not provided, the upload will be + concluded once ``stream`` is exhausted (or :data:`None`). + + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: dict + :returns: The parsed JSON from the "200 OK" response. This will be the + **only** response in the multipart case and it will be the + **final** response in the resumable case. + """ + if size is None: + response = self._do_resumable_upload( + client, stream, metadata, num_retries) + else: + response = self._do_multipart_upload( + client, stream, metadata, size, num_retries) + + return response.json() # pylint: disable=too-many-arguments,too-many-locals def upload_from_file(self, @@ -793,7 +1003,7 @@ def upload_from_file(self, source_format, rewind=False, size=None, - num_retries=6, + num_retries=_DEFAULT_NUM_RETRIES, allow_jagged_rows=None, allow_quoted_newlines=None, create_disposition=None, @@ -804,13 +1014,10 @@ def upload_from_file(self, quote_character=None, skip_leading_rows=None, write_disposition=None, - client=None): + client=None, + job_name=None): """Upload the contents of this table from a file-like object. - The content type of the upload will either be - - The value passed in to the function (if any) - - ``text/csv``. - :type file_obj: file :param file_obj: A file handle opened in binary mode for reading. @@ -821,7 +1028,7 @@ def upload_from_file(self, :type rewind: bool :param rewind: If True, seek to the beginning of the file handle before - writing the file to Cloud Storage. + writing the file. :type size: int :param size: The number of bytes to read from the file handle. @@ -872,12 +1079,16 @@ def upload_from_file(self, :param write_disposition: job configuration option; see :meth:`google.cloud.bigquery.job.LoadJob`. - :type client: :class:`~google.cloud.storage.client.Client` or - ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to the ``client`` stored on the current dataset. + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: (Optional) The client to use. If not passed, falls back + to the ``client`` stored on the current table. + + :type job_name: str + :param job_name: Optional. The id of the job. Generated if not + explicitly passed in. + + :rtype: :class:`~google.cloud.bigquery.jobs.LoadTableFromStorageJob` - :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob` :returns: the job instance used to load the data (e.g., for querying status). Note that the job is already started: do not call ``job.begin()``. @@ -886,102 +1097,23 @@ def upload_from_file(self, a file opened in text mode. """ client = self._require_client(client) - connection = client._connection - content_type = 'application/octet-stream' - - # Rewind the file if desired. - if rewind: - file_obj.seek(0, os.SEEK_SET) - - mode = getattr(file_obj, 'mode', None) - - if mode is not None and mode not in ('rb', 'r+b', 'rb+'): - raise ValueError( - "Cannot upload files opened in text mode: use " - "open(filename, mode='rb') or open(filename, mode='r+b')") - - # Get the basic stats about the file. - total_bytes = size - if total_bytes is None: - if hasattr(file_obj, 'fileno'): - total_bytes = os.fstat(file_obj.fileno()).st_size - else: - raise ValueError('total bytes could not be determined. 
Please ' - 'pass an explicit size.') - headers = { - 'Accept': 'application/json', - 'Accept-Encoding': 'gzip, deflate', - 'User-Agent': connection.USER_AGENT, - 'content-type': 'application/json', - } - - metadata = { - 'configuration': { - 'load': { - 'sourceFormat': source_format, - 'destinationTable': { - 'projectId': self._dataset.project, - 'datasetId': self._dataset.name, - 'tableId': self.name, - } - } - } - } - - if len(self._schema) > 0: - load_config = metadata['configuration']['load'] - load_config['schema'] = { - 'fields': _build_schema_resource(self._schema) - } - + _maybe_rewind(file_obj, rewind=rewind) + _check_mode(file_obj) + metadata = _get_upload_metadata( + source_format, self._schema, self._dataset, self.name) _configure_job_metadata(metadata, allow_jagged_rows, allow_quoted_newlines, create_disposition, encoding, field_delimiter, ignore_unknown_values, max_bad_records, quote_character, skip_leading_rows, - write_disposition) - - upload = Upload(file_obj, content_type, total_bytes, - auto_transfer=False) - - url_builder = _UrlBuilder() - upload_config = _UploadConfig() + write_disposition, job_name) - # Base URL may change once we know simple vs. resumable. - base_url = connection.API_BASE_URL + '/upload' - path = '/projects/%s/jobs' % (self._dataset.project,) - upload_url = connection.build_api_url(api_base_url=base_url, path=path) - - # Use apitools 'Upload' facility. - request = Request(upload_url, 'POST', headers, - body=json.dumps(metadata)) - - upload.configure_request(upload_config, request, url_builder) - query_params = url_builder.query_params - base_url = connection.API_BASE_URL + '/upload' - request.url = connection.build_api_url(api_base_url=base_url, - path=path, - query_params=query_params) try: - upload.initialize_upload(request, connection.http) - except HttpError as err_response: - faux_response = httplib2.Response(err_response.response) - raise make_exception(faux_response, err_response.content, - error_info=request.url) - - if upload.strategy == RESUMABLE_UPLOAD: - http_response = upload.stream_file(use_chunks=True) - else: - http_response = make_api_request(connection.http, request, - retries=num_retries) - - self._check_response_error(request, http_response) - - response_content = http_response.content - if not isinstance(response_content, - six.string_types): # pragma: NO COVER Python3 - response_content = response_content.decode('utf-8') - return client.job_from_resource(json.loads(response_content)) + created_json = self._do_upload( + client, file_obj, metadata, size, num_retries) + return client.job_from_resource(created_json) + except resumable_media.InvalidResponse as exc: + _raise_from_invalid_response(exc) # pylint: enable=too-many-arguments,too-many-locals @@ -995,7 +1127,8 @@ def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments max_bad_records, quote_character, skip_leading_rows, - write_disposition): + write_disposition, + job_name): """Helper for :meth:`Table.upload_from_file`.""" load_config = metadata['configuration']['load'] @@ -1029,6 +1162,9 @@ def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments if write_disposition is not None: load_config['writeDisposition'] = write_disposition + if job_name is not None: + load_config['jobReference'] = {'jobId': job_name} + def _parse_schema_resource(info): """Parse a resource fragment into a schema field. @@ -1041,7 +1177,7 @@ def _parse_schema_resource(info): present in ``info``. 
""" if 'fields' not in info: - return None + return () schema = [] for r_field in info['fields']: @@ -1071,24 +1207,113 @@ def _build_schema_resource(fields): 'mode': field.mode} if field.description is not None: info['description'] = field.description - if field.fields is not None: + if field.fields: info['fields'] = _build_schema_resource(field.fields) infos.append(info) return infos +# pylint: enable=unused-argument + + +def _maybe_rewind(stream, rewind=False): + """Rewind the stream if desired. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type rewind: bool + :param rewind: Indicates if we should seek to the beginning of the stream. + """ + if rewind: + stream.seek(0, os.SEEK_SET) + +def _check_mode(stream): + """Check that a stream was opened in read-binary mode. -class _UploadConfig(object): - """Faux message FBO apitools' 'configure_request'.""" - accept = ['*/*'] - max_size = None - resumable_multipart = True - resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' - simple_multipart = True - simple_path = u'/upload/bigquery/v2/projects/{project}/jobs' + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :raises: :exc:`ValueError` if the ``stream.mode`` is a valid attribute + and is not among ``rb``, ``r+b`` or ``rb+``. + """ + mode = getattr(stream, 'mode', None) + + if mode is not None and mode not in ('rb', 'r+b', 'rb+'): + raise ValueError( + "Cannot upload files opened in text mode: use " + "open(filename, mode='rb') or open(filename, mode='r+b')") + + +def _get_upload_headers(user_agent): + """Get the headers for an upload request. + + :type user_agent: str + :param user_agent: The user-agent for requests. + + :rtype: dict + :returns: The headers to be used for the request. + """ + return { + 'Accept': 'application/json', + 'Accept-Encoding': 'gzip, deflate', + 'User-Agent': user_agent, + 'content-type': 'application/json', + } -class _UrlBuilder(object): - """Faux builder FBO apitools' 'configure_request'""" - def __init__(self): - self.query_params = {} - self._relative_path = '' +def _get_upload_metadata(source_format, schema, dataset, name): + """Get base metadata for creating a table. + + :type source_format: str + :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'. + job configuration option. + + :type schema: list + :param schema: List of :class:`SchemaField` associated with a table. + + :type dataset: :class:`~google.cloud.bigquery.dataset.Dataset` + :param dataset: A dataset which contains a table. + + :type name: str + :param name: The name of the table. + + :rtype: dict + :returns: The metadata dictionary. + """ + load_config = { + 'sourceFormat': source_format, + 'destinationTable': { + 'projectId': dataset.project, + 'datasetId': dataset.name, + 'tableId': name, + }, + } + if schema: + load_config['schema'] = { + 'fields': _build_schema_resource(schema), + } + + return { + 'configuration': { + 'load': load_config, + }, + } + + +def _raise_from_invalid_response(error, error_info=None): + """Re-wrap and raise an ``InvalidResponse`` exception. + + :type error: :exc:`google.resumable_media.InvalidResponse` + :param error: A caught exception from the ``google-resumable-media`` + library. + + :type error_info: str + :param error_info: (Optional) Extra information about the failed request. 
+
+    :raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
+             to the failed status code.
+    """
+    response = error.response
+    faux_response = httplib2.Response({'status': response.status_code})
+    raise make_exception(faux_response, response.content,
+                         error_info=error_info, use_json=False)
diff --git a/bigquery/nox.py b/bigquery/nox.py
index 27bfb7f87ac4..989965443159 100644
--- a/bigquery/nox.py
+++ b/bigquery/nox.py
@@ -19,7 +19,9 @@
 import nox
 
-LOCAL_DEPS = ('../core/',)
+LOCAL_DEPS = (
+    os.path.join('..', 'core'),
+)
 
 
 @nox.session
@@ -30,15 +32,25 @@ def unit_tests(session, python_version):
     # Run unit tests against all supported versions of Python.
     session.interpreter = 'python{}'.format(python_version)
 
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'unit-' + python_version
+
     # Install all test dependencies, then install this package in-place.
     session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)
     session.install('-e', '.')
 
     # Run py.test against the unit tests.
-    session.run('py.test', '--quiet',
-        '--cov=google.cloud.bigquery', '--cov=tests.unit', '--cov-append',
-        '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97',
-        'tests/unit',
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google.cloud.bigquery',
+        '--cov=tests.unit',
+        '--cov-append',
+        '--cov-config=.coveragerc',
+        '--cov-report=',
+        '--cov-fail-under=97',
+        os.path.join('tests', 'unit'),
+        *session.posargs
     )
 
 
@@ -49,38 +61,62 @@ def system_tests(session, python_version):
 
     # Sanity check: Only run system tests if the environment variable is set.
     if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
-        return
+        session.skip('Credentials must be set via environment variable.')
 
     # Run the system tests against latest Python 2 and Python 3 only.
     session.interpreter = 'python{}'.format(python_version)
 
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'sys-' + python_version
+
     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
     session.install('mock', 'pytest', *LOCAL_DEPS)
-    session.install('../storage/', '../test_utils/')
+    session.install(
+        os.path.join('..', 'storage'),
+        os.path.join('..', 'test_utils'),
+    )
     session.install('.')
 
     # Run py.test against the system tests.
-    session.run('py.test', '--quiet', 'tests/system.py')
+    session.run(
+        'py.test',
+        '--quiet',
+        os.path.join('tests', 'system.py'),
+        *session.posargs
+    )
 
 
 @nox.session
 def lint(session):
-    """Run flake8.
+    """Run linters.
 
-    Returns a failure if flake8 finds linting errors or sufficiently
+    Returns a failure if the linters find linting errors or sufficiently
     serious code quality issues.
     """
     session.interpreter = 'python3.6'
-    session.install('flake8', *LOCAL_DEPS)
+
+    session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS)
     session.install('.')
-    session.run('flake8', 'google/cloud/bigquery')
+    session.run('flake8', os.path.join('google', 'cloud', 'bigquery'))
+    session.run('flake8', 'tests')
+    session.run(
+        'gcp-devrel-py-tools', 'run-pylint',
+        '--config', 'pylint.config.py',
+        '--library-filesets', 'google',
+        '--test-filesets', 'tests',
+        # Temporarily allow this to fail.
+        success_codes=range(0, 100))
 
 
 @nox.session
 def lint_setup_py(session):
    """Verify that setup.py is valid (including RST check)."""
    session.interpreter = 'python3.6'
+
+    # Set the virtualenv dirname.
+ session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') @@ -94,6 +130,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') session.run('coverage', 'report', '--show-missing', '--fail-under=100') session.run('coverage', 'erase') diff --git a/bigquery/pylint.config.py b/bigquery/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/bigquery/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/bigquery/setup.py b/bigquery/setup.py index ffd62619efe3..eeb2d90549d8 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,12 +51,15 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-auth >= 1.0.0', + 'google-resumable-media >= 0.2.1', + 'requests >= 2.0.0', ] setup( name='google-cloud-bigquery', - version='0.24.0', + version='0.25.0', description='Python Client for Google BigQuery', long_description=README, namespace_packages=[ diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 456953194a53..1d3da3d2a83d 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -19,9 +19,11 @@ import os import time import unittest +import uuid from google.cloud import bigquery from google.cloud._helpers import UTC +from google.cloud.bigquery import dbapi from google.cloud.exceptions import Forbidden from test_utils.retry import RetryErrors @@ -70,10 +72,12 @@ class Config(object): global state. """ CLIENT = None + CURSOR = None def setUpModule(): Config.CLIENT = bigquery.Client() + Config.CURSOR = dbapi.connect(Config.CLIENT).cursor() class TestBigQuery(unittest.TestCase): @@ -167,9 +171,9 @@ def test_list_datasets(self): 'newest' + unique_resource_id(), ] for dataset_name in datasets_to_create: - dataset = Config.CLIENT.dataset(dataset_name) - retry_403(dataset.create)() - self.to_delete.append(dataset) + created_dataset = Config.CLIENT.dataset(dataset_name) + retry_403(created_dataset.create)() + self.to_delete.append(created_dataset) # Retrieve the datasets. 
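# ---------------------------------------------------------------------------
# The ``dbapi.connect(Config.CLIENT).cursor()`` wiring in ``setUpModule``
# above is the new PEP 249 surface that these system tests exercise end to
# end. A minimal sketch of the same flow outside the test harness (assumes
# default application credentials; the query is illustrative only):
#
#     from google.cloud import bigquery
#     from google.cloud.bigquery import dbapi
#
#     connection = dbapi.connect(bigquery.Client())
#     cursor = connection.cursor()
#     cursor.execute('SELECT 1')
#     assert cursor.fetchone() == (1,)
# ---------------------------------------------------------------------------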
iterator = Config.CLIENT.list_datasets() @@ -222,9 +226,9 @@ def test_list_tables(self): mode='REQUIRED') age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') for table_name in tables_to_create: - table = dataset.table(table_name, schema=[full_name, age]) - table.create() - self.to_delete.insert(0, table) + created_table = dataset.table(table_name, schema=[full_name, age]) + created_table.create() + self.to_delete.insert(0, created_table) # Retrieve the tables. iterator = dataset.list_tables() @@ -376,9 +380,6 @@ def test_load_table_from_local_file_then_dump_table(self): write_disposition='WRITE_EMPTY', ) - def _job_done(instance): - return instance.state.lower() == 'done' - # Retry until done. retry = RetryInstanceState(_job_done, max_tries=8) retry(job.reload)() @@ -417,9 +418,6 @@ def test_load_table_from_local_avro_file_then_dump_table(self): write_disposition='WRITE_TRUNCATE' ) - def _job_done(instance): - return instance.state.lower() == 'done' - # Retry until done. retry = RetryInstanceState(_job_done, max_tries=8) retry(job.reload)() @@ -492,9 +490,6 @@ def test_load_table_from_storage_then_dump_table(self): job.begin() - def _job_done(instance): - return instance.state in ('DONE', 'done') - # Allow for 90 seconds of "warm up" before rows visible. See # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability # 8 tries -> 1 + 2 + 4 + 8 + 16 + 32 + 64 = 127 seconds @@ -528,9 +523,6 @@ def test_job_cancel(self): job.begin() job.cancel() - def _job_done(instance): - return instance.state in ('DONE', 'done') - retry = RetryInstanceState(_job_done, max_tries=8) retry(job.reload)() @@ -544,7 +536,7 @@ def test_sync_query_w_legacy_sql_types(self): naive = datetime.datetime(2016, 12, 5, 12, 41, 9) stamp = '%s %s' % (naive.date().isoformat(), naive.time().isoformat()) zoned = naive.replace(tzinfo=UTC) - EXAMPLES = [ + examples = [ { 'sql': 'SELECT 1', 'expected': 1, @@ -570,7 +562,7 @@ def test_sync_query_w_legacy_sql_types(self): 'expected': zoned, }, ] - for example in EXAMPLES: + for example in examples: query = Config.CLIENT.run_sync_query(example['sql']) query.use_legacy_sql = True query.run() @@ -578,11 +570,11 @@ def test_sync_query_w_legacy_sql_types(self): self.assertEqual(len(query.rows[0]), 1) self.assertEqual(query.rows[0][0], example['expected']) - def test_sync_query_w_standard_sql_types(self): + def _generate_standard_sql_types_examples(self): naive = datetime.datetime(2016, 12, 5, 12, 41, 9) stamp = '%s %s' % (naive.date().isoformat(), naive.time().isoformat()) zoned = naive.replace(tzinfo=UTC) - EXAMPLES = [ + return [ { 'sql': 'SELECT 1', 'expected': 1, @@ -659,7 +651,10 @@ def test_sync_query_w_standard_sql_types(self): 'expected': [{u'_field_1': [1, 2]}], }, ] - for example in EXAMPLES: + + def test_sync_query_w_standard_sql_types(self): + examples = self._generate_standard_sql_types_examples() + for example in examples: query = Config.CLIENT.run_sync_query(example['sql']) query.use_legacy_sql = False query.run() @@ -667,6 +662,80 @@ def test_sync_query_w_standard_sql_types(self): self.assertEqual(len(query.rows[0]), 1) self.assertEqual(query.rows[0][0], example['expected']) + def test_dbapi_w_standard_sql_types(self): + examples = self._generate_standard_sql_types_examples() + for example in examples: + Config.CURSOR.execute(example['sql']) + self.assertEqual(Config.CURSOR.rowcount, 1) + row = Config.CURSOR.fetchone() + self.assertEqual(len(row), 1) + self.assertEqual(row[0], example['expected']) + row = Config.CURSOR.fetchone() + 
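# ---------------------------------------------------------------------------
# Per PEP 249, ``fetchone()`` returns ``None`` once the result set is
# exhausted, which is exactly what the assertion just below relies on. A
# small self-contained sketch of draining a cursor this way (``drain`` is an
# illustrative helper, not part of the library):
#
#     def drain(cursor):
#         rows = []
#         while True:
#             row = cursor.fetchone()
#             if row is None:      # exhausted
#                 break
#             rows.append(row)
#         return rows
# ---------------------------------------------------------------------------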
self.assertIsNone(row) + + def _load_table_for_dml(self, rows, dataset_name, table_name): + import csv + from google.cloud._testing import _NamedTemporaryFile + + dataset = Config.CLIENT.dataset(dataset_name) + retry_403(dataset.create)() + self.to_delete.append(dataset) + + greeting = bigquery.SchemaField( + 'greeting', 'STRING', mode='NULLABLE') + table = dataset.table(table_name, schema=[greeting]) + table.create() + self.to_delete.insert(0, table) + + with _NamedTemporaryFile() as temp: + with open(temp.name, 'w') as csv_write: + writer = csv.writer(csv_write) + writer.writerow(('Greeting',)) + writer.writerows(rows) + + with open(temp.name, 'rb') as csv_read: + job = table.upload_from_file( + csv_read, + source_format='CSV', + skip_leading_rows=1, + create_disposition='CREATE_NEVER', + write_disposition='WRITE_EMPTY', + ) + + # Retry until done. + retry = RetryInstanceState(_job_done, max_tries=8) + retry(job.reload)() + self._fetch_single_page(table) + + def test_sync_query_w_dml(self): + dataset_name = _make_dataset_name('dml_tests') + table_name = 'test_table' + self._load_table_for_dml([('Hello World',)], dataset_name, table_name) + query_template = """UPDATE {}.{} + SET greeting = 'Guten Tag' + WHERE greeting = 'Hello World' + """ + + query = Config.CLIENT.run_sync_query( + query_template.format(dataset_name, table_name)) + query.use_legacy_sql = False + query.run() + + self.assertEqual(query.num_dml_affected_rows, 1) + + def test_dbapi_w_dml(self): + dataset_name = _make_dataset_name('dml_tests') + table_name = 'test_table' + self._load_table_for_dml([('Hello World',)], dataset_name, table_name) + query_template = """UPDATE {}.{} + SET greeting = 'Guten Tag' + WHERE greeting = 'Hello World' + """ + + Config.CURSOR.execute(query_template.format(dataset_name, table_name)) + self.assertEqual(Config.CURSOR.rowcount, 1) + self.assertIsNone(Config.CURSOR.fetchone()) + def test_sync_query_w_query_params(self): from google.cloud.bigquery._helpers import ArrayQueryParameter from google.cloud.bigquery._helpers import ScalarQueryParameter @@ -729,7 +798,7 @@ def test_sync_query_w_query_params(self): name='friends', array_type='STRING', values=[phred_name, bharney_name]) with_friends_param = StructQueryParameter(None, friends_param) - EXAMPLES = [ + examples = [ { 'sql': 'SELECT @question', 'expected': question, @@ -809,7 +878,7 @@ def test_sync_query_w_query_params(self): 'query_parameters': [with_friends_param], }, ] - for example in EXAMPLES: + for example in examples: query = Config.CLIENT.run_sync_query( example['sql'], query_parameters=example['query_parameters']) @@ -819,6 +888,105 @@ def test_sync_query_w_query_params(self): self.assertEqual(len(query.rows[0]), 1) self.assertEqual(query.rows[0][0], example['expected']) + def test_dbapi_w_query_parameters(self): + examples = [ + { + 'sql': 'SELECT %(boolval)s', + 'expected': True, + 'query_parameters': { + 'boolval': True, + }, + }, + { + 'sql': 'SELECT %(a "very" weird `name`)s', + 'expected': True, + 'query_parameters': { + 'a "very" weird `name`': True, + }, + }, + { + 'sql': 'SELECT %(select)s', + 'expected': True, + 'query_parameters': { + 'select': True, # this name is a keyword + }, + }, + { + 'sql': 'SELECT %s', + 'expected': False, + 'query_parameters': [False], + }, + { + 'sql': 'SELECT %(intval)s', + 'expected': 123, + 'query_parameters': { + 'intval': 123, + }, + }, + { + 'sql': 'SELECT %s', + 'expected': -123456789, + 'query_parameters': [-123456789], + }, + { + 'sql': 'SELECT %(floatval)s', + 'expected': 1.25, + 
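# ---------------------------------------------------------------------------
# These cases cover both DB-API ``pyformat`` placeholder styles: named
# ``%(name)s`` placeholders bind to a dict of parameters, while bare ``%s``
# placeholders bind positionally to a sequence. Illustrative call shapes
# (the values are arbitrary):
#
#     Config.CURSOR.execute('SELECT %(intval)s', {'intval': 123})  # named
#     Config.CURSOR.execute('SELECT %s', (123,))                   # positional
# ---------------------------------------------------------------------------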
'query_parameters': { + 'floatval': 1.25, + }, + }, + { + 'sql': 'SELECT LOWER(%(strval)s)', + 'query_parameters': { + 'strval': 'I Am A String', + }, + 'expected': 'i am a string', + }, + { + 'sql': 'SELECT DATE_SUB(%(dateval)s, INTERVAL 1 DAY)', + 'query_parameters': { + 'dateval': datetime.date(2017, 4, 2), + }, + 'expected': datetime.date(2017, 4, 1), + }, + { + 'sql': 'SELECT TIME_ADD(%(timeval)s, INTERVAL 4 SECOND)', + 'query_parameters': { + 'timeval': datetime.time(12, 34, 56), + }, + 'expected': datetime.time(12, 35, 0), + }, + { + 'sql': ( + 'SELECT DATETIME_ADD(%(datetimeval)s, INTERVAL 53 SECOND)' + ), + 'query_parameters': { + 'datetimeval': datetime.datetime(2012, 3, 4, 5, 6, 7), + }, + 'expected': datetime.datetime(2012, 3, 4, 5, 7, 0), + }, + { + 'sql': 'SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)', + 'query_parameters': { + 'zoned': datetime.datetime( + 2012, 3, 4, 5, 6, 7, tzinfo=UTC), + }, + 'expected': datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), + }, + ] + for example in examples: + msg = 'sql: {} query_parameters: {}'.format( + example['sql'], example['query_parameters']) + + Config.CURSOR.execute(example['sql'], example['query_parameters']) + + self.assertEqual(Config.CURSOR.rowcount, 1, msg=msg) + row = Config.CURSOR.fetchone() + self.assertEqual(len(row), 1, msg=msg) + self.assertEqual(row[0], example['expected'], msg=msg) + row = Config.CURSOR.fetchone() + self.assertIsNone(row, msg=msg) + def test_dump_table_w_public_data(self): PUBLIC = 'bigquery-public-data' DATASET_NAME = 'samples' @@ -838,7 +1006,6 @@ def test_large_query_w_public_data(self): SQL = 'SELECT * from `{}.{}.{}` LIMIT {}'.format( PUBLIC, DATASET_NAME, TABLE_NAME, LIMIT) - dataset = Config.CLIENT.dataset(DATASET_NAME, project=PUBLIC) query = Config.CLIENT.run_sync_query(SQL) query.use_legacy_sql = False query.run() @@ -847,6 +1014,15 @@ def test_large_query_w_public_data(self): rows = list(iterator) self.assertEqual(len(rows), LIMIT) + def test_async_query_future(self): + query_job = Config.CLIENT.run_async_query( + str(uuid.uuid4()), 'SELECT 1') + query_job.use_legacy_sql = False + + iterator = query_job.result().fetch_data() + rows = list(iterator) + self.assertEqual(rows, [(1,)]) + def test_insert_nested_nested(self): # See #2951 SF = bigquery.SchemaField @@ -951,3 +1127,7 @@ def test_create_table_insert_fetch_nested_schema(self): parts = time.strptime(expected[7], '%Y-%m-%dT%H:%M:%S') e_favtime = datetime.datetime(*parts[0:6]) self.assertEqual(found[7], e_favtime) # FavoriteTime + + +def _job_done(instance): + return instance.state.lower() == 'done' diff --git a/bigquery/tests/unit/test_dbapi__helpers.py b/bigquery/tests/unit/test_dbapi__helpers.py new file mode 100644 index 000000000000..48bca5ae9a59 --- /dev/null +++ b/bigquery/tests/unit/test_dbapi__helpers.py @@ -0,0 +1,97 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
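# ---------------------------------------------------------------------------
# The helper under test below infers a BigQuery parameter type from each
# Python value: bool -> BOOL, int -> INT64, float -> FLOAT64, bytes -> BYTES,
# str -> STRING, date -> DATE, time -> TIME, naive datetime -> DATETIME, and
# timezone-aware datetime -> TIMESTAMP. A sketch of the call (``name`` is
# optional; omit it for a positional parameter):
#
#     from google.cloud.bigquery.dbapi import _helpers
#
#     param = _helpers.scalar_to_query_parameter(1.25, name='x')
#     assert (param.name, param.type_, param.value) == ('x', 'FLOAT64', 1.25)
# ---------------------------------------------------------------------------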
+ +import datetime +import math +import unittest + +import google.cloud._helpers +from google.cloud.bigquery.dbapi import _helpers +from google.cloud.bigquery.dbapi import exceptions + + +class TestQueryParameters(unittest.TestCase): + + def test_scalar_to_query_parameter(self): + expected_types = [ + (True, 'BOOL'), + (False, 'BOOL'), + (123, 'INT64'), + (-123456789, 'INT64'), + (1.25, 'FLOAT64'), + (b'I am some bytes', 'BYTES'), + (u'I am a string', 'STRING'), + (datetime.date(2017, 4, 1), 'DATE'), + (datetime.time(12, 34, 56), 'TIME'), + (datetime.datetime(2012, 3, 4, 5, 6, 7), 'DATETIME'), + ( + datetime.datetime( + 2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC), + 'TIMESTAMP', + ), + ] + for value, expected_type in expected_types: + msg = 'value: {} expected_type: {}'.format(value, expected_type) + parameter = _helpers.scalar_to_query_parameter(value) + self.assertIsNone(parameter.name, msg=msg) + self.assertEqual(parameter.type_, expected_type, msg=msg) + self.assertEqual(parameter.value, value, msg=msg) + named_parameter = _helpers.scalar_to_query_parameter( + value, name='myvar') + self.assertEqual(named_parameter.name, 'myvar', msg=msg) + self.assertEqual(named_parameter.type_, expected_type, msg=msg) + self.assertEqual(named_parameter.value, value, msg=msg) + + def test_scalar_to_query_parameter_w_unexpected_type(self): + with self.assertRaises(exceptions.ProgrammingError): + _helpers.scalar_to_query_parameter(value={'a': 'dictionary'}) + + def test_scalar_to_query_parameter_w_special_floats(self): + nan_parameter = _helpers.scalar_to_query_parameter(float('nan')) + self.assertTrue(math.isnan(nan_parameter.value)) + self.assertEqual(nan_parameter.type_, 'FLOAT64') + inf_parameter = _helpers.scalar_to_query_parameter(float('inf')) + self.assertTrue(math.isinf(inf_parameter.value)) + self.assertEqual(inf_parameter.type_, 'FLOAT64') + + def test_to_query_parameters_w_dict(self): + parameters = { + 'somebool': True, + 'somestring': u'a-string-value', + } + query_parameters = _helpers.to_query_parameters(parameters) + query_parameter_tuples = [] + for param in query_parameters: + query_parameter_tuples.append( + (param.name, param.type_, param.value)) + self.assertSequenceEqual( + sorted(query_parameter_tuples), + sorted([ + ('somebool', 'BOOL', True), + ('somestring', 'STRING', u'a-string-value'), + ])) + + def test_to_query_parameters_w_list(self): + parameters = [True, u'a-string-value'] + query_parameters = _helpers.to_query_parameters(parameters) + query_parameter_tuples = [] + for param in query_parameters: + query_parameter_tuples.append( + (param.name, param.type_, param.value)) + self.assertSequenceEqual( + sorted(query_parameter_tuples), + sorted([ + (None, 'BOOL', True), + (None, 'STRING', u'a-string-value'), + ])) diff --git a/bigquery/tests/unit/test_dbapi_connection.py b/bigquery/tests/unit/test_dbapi_connection.py new file mode 100644 index 000000000000..d30852377852 --- /dev/null +++ b/bigquery/tests/unit/test_dbapi_connection.py @@ -0,0 +1,73 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock + + +class TestConnection(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.bigquery.dbapi import Connection + return Connection + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def _mock_client(self, rows=None, schema=None): + from google.cloud.bigquery import client + mock_client = mock.create_autospec(client.Client) + return mock_client + + def test_ctor(self): + from google.cloud.bigquery.dbapi import Connection + mock_client = self._mock_client() + connection = self._make_one(client=mock_client) + self.assertIsInstance(connection, Connection) + self.assertIs(connection._client, mock_client) + + @mock.patch('google.cloud.bigquery.Client', autospec=True) + def test_connect_wo_client(self, mock_client): + from google.cloud.bigquery.dbapi import connect + from google.cloud.bigquery.dbapi import Connection + connection = connect() + self.assertIsInstance(connection, Connection) + self.assertIsNotNone(connection._client) + + def test_connect_w_client(self): + from google.cloud.bigquery.dbapi import connect + from google.cloud.bigquery.dbapi import Connection + mock_client = self._mock_client() + connection = connect(client=mock_client) + self.assertIsInstance(connection, Connection) + self.assertIs(connection._client, mock_client) + + def test_close(self): + connection = self._make_one(client=self._mock_client()) + # close() is a no-op, there is nothing to test. + connection.close() + + def test_commit(self): + connection = self._make_one(client=self._mock_client()) + # commit() is a no-op, there is nothing to test. + connection.commit() + + def test_cursor(self): + from google.cloud.bigquery.dbapi import Cursor + connection = self._make_one(client=self._mock_client()) + cursor = connection.cursor() + self.assertIsInstance(cursor, Cursor) + self.assertIs(cursor.connection, connection) diff --git a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py new file mode 100644 index 000000000000..2a2ccfd989a6 --- /dev/null +++ b/bigquery/tests/unit/test_dbapi_cursor.py @@ -0,0 +1,287 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import mock + + +class TestCursor(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.bigquery.dbapi import Cursor + return Cursor + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def _mock_client( + self, rows=None, schema=None, num_dml_affected_rows=None): + from google.cloud.bigquery import client + mock_client = mock.create_autospec(client.Client) + mock_client.run_async_query.return_value = self._mock_job( + rows=rows, schema=schema, + num_dml_affected_rows=num_dml_affected_rows) + return mock_client + + def _mock_job( + self, rows=None, schema=None, num_dml_affected_rows=None): + from google.cloud.bigquery import job + mock_job = mock.create_autospec(job.QueryJob) + mock_job.error_result = None + mock_job.state = 'DONE' + mock_job.result.return_value = self._mock_results( + rows=rows, schema=schema, + num_dml_affected_rows=num_dml_affected_rows) + return mock_job + + def _mock_results( + self, rows=None, schema=None, num_dml_affected_rows=None): + from google.cloud.bigquery import query + mock_results = mock.create_autospec(query.QueryResults) + mock_results.schema = schema + mock_results.num_dml_affected_rows = num_dml_affected_rows + + if rows is None: + mock_results.total_rows = 0 + else: + mock_results.total_rows = len(rows) + + mock_results.fetch_data.return_value = rows + return mock_results + + def test_ctor(self): + from google.cloud.bigquery.dbapi import connect + from google.cloud.bigquery.dbapi import Cursor + connection = connect(self._mock_client()) + cursor = self._make_one(connection) + self.assertIsInstance(cursor, Cursor) + self.assertIs(cursor.connection, connection) + + def test_close(self): + from google.cloud.bigquery.dbapi import connect + connection = connect(self._mock_client()) + cursor = connection.cursor() + # close() is a no-op, there is nothing to test. 
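# ---------------------------------------------------------------------------
# The ``_mock_*`` helpers above stub out the full call chain the cursor
# depends on: ``client.run_async_query()`` returns a job, the job's
# ``result()`` returns query results, and ``fetch_data()`` on those results
# yields rows. Condensed, the wiring amounts to (names illustrative):
#
#     mock_client.run_async_query.return_value = mock_job
#     mock_job.result.return_value = mock_results
#     mock_results.fetch_data.return_value = rows
# ---------------------------------------------------------------------------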
+ cursor.close() + + def test_fetchone_wo_execute_raises_error(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect(self._mock_client()) + cursor = connection.cursor() + self.assertRaises(dbapi.Error, cursor.fetchone) + + def test_fetchone_w_row(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect( + self._mock_client(rows=[(1,)])) + cursor = connection.cursor() + cursor.execute('SELECT 1;') + row = cursor.fetchone() + self.assertEqual(row, (1,)) + self.assertIsNone(cursor.fetchone()) + + def test_fetchmany_wo_execute_raises_error(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect(self._mock_client()) + cursor = connection.cursor() + self.assertRaises(dbapi.Error, cursor.fetchmany) + + def test_fetchmany_w_row(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect( + self._mock_client(rows=[(1,)])) + cursor = connection.cursor() + cursor.execute('SELECT 1;') + rows = cursor.fetchmany() + self.assertEqual(len(rows), 1) + self.assertEqual(rows[0], (1,)) + + def test_fetchmany_w_size(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect( + self._mock_client( + rows=[ + (1, 2, 3), + (4, 5, 6), + (7, 8, 9), + ])) + cursor = connection.cursor() + cursor.execute('SELECT a, b, c;') + rows = cursor.fetchmany(size=2) + self.assertEqual(len(rows), 2) + self.assertEqual(rows[0], (1, 2, 3)) + self.assertEqual(rows[1], (4, 5, 6)) + second_page = cursor.fetchmany(size=2) + self.assertEqual(len(second_page), 1) + self.assertEqual(second_page[0], (7, 8, 9)) + third_page = cursor.fetchmany(size=2) + self.assertEqual(third_page, []) + + def test_fetchmany_w_arraysize(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect( + self._mock_client( + rows=[ + (1, 2, 3), + (4, 5, 6), + (7, 8, 9), + ])) + cursor = connection.cursor() + cursor.arraysize = 2 + cursor.execute('SELECT a, b, c;') + rows = cursor.fetchmany() + self.assertEqual(len(rows), 2) + self.assertEqual(rows[0], (1, 2, 3)) + self.assertEqual(rows[1], (4, 5, 6)) + second_page = cursor.fetchmany() + self.assertEqual(len(second_page), 1) + self.assertEqual(second_page[0], (7, 8, 9)) + third_page = cursor.fetchmany() + self.assertEqual(third_page, []) + + def test_fetchall_wo_execute_raises_error(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect(self._mock_client()) + cursor = connection.cursor() + self.assertRaises(dbapi.Error, cursor.fetchall) + + def test_fetchall_w_row(self): + from google.cloud.bigquery import dbapi + connection = dbapi.connect( + self._mock_client(rows=[(1,)])) + cursor = connection.cursor() + cursor.execute('SELECT 1;') + self.assertIsNone(cursor.description) + self.assertEqual(cursor.rowcount, 1) + rows = cursor.fetchall() + self.assertEqual(len(rows), 1) + self.assertEqual(rows[0], (1,)) + + def test_execute_w_dml(self): + from google.cloud.bigquery.dbapi import connect + connection = connect( + self._mock_client(rows=[], num_dml_affected_rows=12)) + cursor = connection.cursor() + cursor.execute('DELETE FROM UserSessions WHERE user_id = \'test\';') + self.assertIsNone(cursor.description) + self.assertEqual(cursor.rowcount, 12) + + def test_execute_w_query(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery import dbapi + + connection = dbapi.connect(self._mock_client( + rows=[('hello', 'world', 1), ('howdy', 'y\'all', 2)], + schema=[ + SchemaField('a', 'STRING', mode='NULLABLE'), + SchemaField('b', 'STRING', 
mode='REQUIRED'), + SchemaField('c', 'INTEGER', mode='NULLABLE')])) + cursor = connection.cursor() + cursor.execute('SELECT a, b, c FROM hello_world WHERE d > 3;') + + # Verify the description. + self.assertEqual(len(cursor.description), 3) + a_name, a_type, _, _, _, _, a_null_ok = cursor.description[0] + self.assertEqual(a_name, 'a') + self.assertEqual(a_type, 'STRING') + self.assertEqual(a_type, dbapi.STRING) + self.assertTrue(a_null_ok) + b_name, b_type, _, _, _, _, b_null_ok = cursor.description[1] + self.assertEqual(b_name, 'b') + self.assertEqual(b_type, 'STRING') + self.assertEqual(b_type, dbapi.STRING) + self.assertFalse(b_null_ok) + c_name, c_type, _, _, _, _, c_null_ok = cursor.description[2] + self.assertEqual(c_name, 'c') + self.assertEqual(c_type, 'INTEGER') + self.assertEqual(c_type, dbapi.NUMBER) + self.assertTrue(c_null_ok) + + # Verify the results. + self.assertEqual(cursor.rowcount, 2) + row = cursor.fetchone() + self.assertEqual(row, ('hello', 'world', 1)) + row = cursor.fetchone() + self.assertEqual(row, ('howdy', 'y\'all', 2)) + row = cursor.fetchone() + self.assertIsNone(row) + + def test_execute_raises_if_result_raises(self): + import google.cloud.exceptions + + from google.cloud.bigquery import client + from google.cloud.bigquery import job + from google.cloud.bigquery.dbapi import connect + from google.cloud.bigquery.dbapi import exceptions + + job = mock.create_autospec(job.QueryJob) + job.result.side_effect = google.cloud.exceptions.GoogleCloudError('') + client = mock.create_autospec(client.Client) + client.run_async_query.return_value = job + connection = connect(client) + cursor = connection.cursor() + + with self.assertRaises(exceptions.DatabaseError): + cursor.execute('SELECT 1') + + def test_executemany_w_dml(self): + from google.cloud.bigquery.dbapi import connect + connection = connect( + self._mock_client(rows=[], num_dml_affected_rows=12)) + cursor = connection.cursor() + cursor.executemany( + 'DELETE FROM UserSessions WHERE user_id = %s;', + (('test',), ('anothertest',))) + self.assertIsNone(cursor.description) + self.assertEqual(cursor.rowcount, 12) + + def test__format_operation_w_dict(self): + from google.cloud.bigquery.dbapi import cursor + formatted_operation = cursor._format_operation( + 'SELECT %(somevalue)s, %(a `weird` one)s;', + { + 'somevalue': 'hi', + 'a `weird` one': 'world', + }) + self.assertEqual( + formatted_operation, 'SELECT @`somevalue`, @`a \\`weird\\` one`;') + + def test__format_operation_w_wrong_dict(self): + from google.cloud.bigquery import dbapi + from google.cloud.bigquery.dbapi import cursor + self.assertRaises( + dbapi.ProgrammingError, + cursor._format_operation, + 'SELECT %(somevalue)s, %(othervalue)s;', + { + 'somevalue-not-here': 'hi', + 'othervalue': 'world', + }) + + def test__format_operation_w_sequence(self): + from google.cloud.bigquery.dbapi import cursor + formatted_operation = cursor._format_operation( + 'SELECT %s, %s;', ('hello', 'world')) + self.assertEqual(formatted_operation, 'SELECT ?, ?;') + + def test__format_operation_w_too_short_sequence(self): + from google.cloud.bigquery import dbapi + from google.cloud.bigquery.dbapi import cursor + self.assertRaises( + dbapi.ProgrammingError, + cursor._format_operation, + 'SELECT %s, %s;', + ('hello',)) diff --git a/bigquery/tests/unit/test_dbapi_types.py b/bigquery/tests/unit/test_dbapi_types.py new file mode 100644 index 000000000000..afd45b259263 --- /dev/null +++ b/bigquery/tests/unit/test_dbapi_types.py @@ -0,0 +1,40 @@ +# Copyright 2017 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import unittest + +import google.cloud._helpers +from google.cloud.bigquery.dbapi import types + + +class TestTypes(unittest.TestCase): + def test_binary_type(self): + self.assertEqual('BYTES', types.BINARY) + self.assertEqual('RECORD', types.BINARY) + self.assertEqual('STRUCT', types.BINARY) + self.assertNotEqual('STRING', types.BINARY) + + def test_binary_constructor(self): + self.assertEqual(types.Binary(u'hello'), b'hello') + self.assertEqual(types.Binary(u'\u1f60'), u'\u1f60'.encode('utf-8')) + + def test_timefromticks(self): + somedatetime = datetime.datetime( + 2017, 2, 18, 12, 47, 26, tzinfo=google.cloud._helpers.UTC) + epoch = datetime.datetime(1970, 1, 1, tzinfo=google.cloud._helpers.UTC) + ticks = (somedatetime - epoch).total_seconds() + self.assertEqual( + types.TimeFromTicks(ticks, google.cloud._helpers.UTC), + datetime.time(12, 47, 26, tzinfo=google.cloud._helpers.UTC)) diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 57d96bf8ae15..fcb518d9c502 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -12,9 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
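# ---------------------------------------------------------------------------
# ``types.BINARY`` in the assertions above behaves like a classic DB-API
# "type object": a singleton that compares equal to every backend type name
# in its group, which is why it equals 'BYTES', 'RECORD', and 'STRUCT' alike.
# The PEP 249 reference recipe, in miniature (a sketch, not the module's
# exact implementation):
#
#     class DBAPITypeObject(object):
#         def __init__(self, *values):
#             self.values = values
#
#         def __eq__(self, other):
#             return other in self.values
#
#     BINARY = DBAPITypeObject('BYTES', 'RECORD', 'STRUCT')
#     assert BINARY == 'RECORD'
# ---------------------------------------------------------------------------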
+import copy + +from six.moves import http_client import unittest +class Test__error_result_to_exception(unittest.TestCase): + def _call_fut(self, *args, **kwargs): + from google.cloud.bigquery import job + return job._error_result_to_exception(*args, **kwargs) + + def test_simple(self): + error_result = { + 'reason': 'invalid', + 'message': 'bad request' + } + exception = self._call_fut(error_result) + self.assertEqual(exception.code, http_client.BAD_REQUEST) + self.assertTrue(exception.message.startswith('bad request')) + self.assertIn("'reason': 'invalid'", exception.message) + + def test_missing_reason(self): + error_result = {} + exception = self._call_fut(error_result) + self.assertEqual(exception.code, http_client.INTERNAL_SERVER_ERROR) + + class _Base(object): PROJECT = 'project' SOURCE1 = 'http://example.com/source1.csv' @@ -1514,15 +1538,78 @@ def test_from_api_repr_w_properties(self): self.assertIs(dataset._client, client) self._verifyResourceProperties(dataset, RESOURCE) - def test_results(self): + def test_cancelled(self): + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + job._properties['status'] = { + 'state': 'DONE', + 'errorResult': { + 'reason': 'stopped' + } + } + + self.assertTrue(job.cancelled()) + + def test_query_results(self): from google.cloud.bigquery.query import QueryResults client = _Client(self.PROJECT) job = self._make_one(self.JOB_NAME, self.QUERY, client) - results = job.results() + results = job.query_results() self.assertIsInstance(results, QueryResults) self.assertIs(results._job, job) + def test_result(self): + from google.cloud.bigquery.query import QueryResults + + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + job._properties['status'] = {'state': 'DONE'} + + result = job.result() + + self.assertIsInstance(result, QueryResults) + self.assertIs(result._job, job) + + def test_result_invokes_begins(self): + begun_resource = self._makeResource() + done_resource = copy.deepcopy(begun_resource) + done_resource['status'] = {'state': 'DONE'} + connection = _Connection(begun_resource, done_resource) + client = _Client(self.PROJECT, connection=connection) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + + job.result() + + self.assertEqual(len(connection._requested), 2) + begin_request, reload_request = connection._requested + self.assertEqual(begin_request['method'], 'POST') + self.assertEqual(reload_request['method'], 'GET') + + def test_result_error(self): + from google.cloud import exceptions + + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + error_result = { + 'debugInfo': 'DEBUG', + 'location': 'LOCATION', + 'message': 'MESSAGE', + 'reason': 'invalid' + } + job._properties['status'] = { + 'errorResult': error_result, + 'errors': [error_result], + 'state': 'DONE' + } + job._set_future_result() + + with self.assertRaises(exceptions.GoogleCloudError) as exc_info: + job.result() + + self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) + self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) + def test_begin_w_bound_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) RESOURCE = self._makeResource() diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py index d7977a4e7d0c..76d5057f6450 100644 --- a/bigquery/tests/unit/test_query.py +++ b/bigquery/tests/unit/test_query.py @@ -88,9 +88,9 @@ def _verifySchema(self, query, resource): self.assertEqual(found.mode, 
expected['mode']) self.assertEqual(found.description, expected.get('description')) - self.assertEqual(found.fields, expected.get('fields')) + self.assertEqual(found.fields, expected.get('fields', ())) else: - self.assertIsNone(query.schema) + self.assertEqual(query.schema, ()) def _verifyRows(self, query, resource): expected = resource.get('rows') @@ -166,7 +166,7 @@ def test_ctor_defaults(self): self.assertIsNone(query.page_token) self.assertEqual(query.query_parameters, []) self.assertEqual(query.rows, []) - self.assertIsNone(query.schema) + self.assertEqual(query.schema, ()) self.assertIsNone(query.total_rows) self.assertIsNone(query.total_bytes_processed) self.assertEqual(query.udf_resources, []) diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index 8081fcd6f4e0..bf3cf2e025d1 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -26,43 +26,82 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor_defaults(self): + def test_constructor_defaults(self): field = self._make_one('test', 'STRING') - self.assertEqual(field.name, 'test') - self.assertEqual(field.field_type, 'STRING') - self.assertEqual(field.mode, 'NULLABLE') - self.assertIsNone(field.description) - self.assertIsNone(field.fields) + self.assertEqual(field._name, 'test') + self.assertEqual(field._field_type, 'STRING') + self.assertEqual(field._mode, 'NULLABLE') + self.assertIsNone(field._description) + self.assertEqual(field._fields, ()) - def test_ctor_explicit(self): + def test_constructor_explicit(self): field = self._make_one('test', 'STRING', mode='REQUIRED', description='Testing') - self.assertEqual(field.name, 'test') - self.assertEqual(field.field_type, 'STRING') - self.assertEqual(field.mode, 'REQUIRED') - self.assertEqual(field.description, 'Testing') - self.assertIsNone(field.fields) - - def test_ctor_subfields(self): + self.assertEqual(field._name, 'test') + self.assertEqual(field._field_type, 'STRING') + self.assertEqual(field._mode, 'REQUIRED') + self.assertEqual(field._description, 'Testing') + self.assertEqual(field._fields, ()) + + def test_constructor_subfields(self): + sub_field1 = self._make_one('area_code', 'STRING') + sub_field2 = self._make_one('local_number', 'STRING') field = self._make_one( - 'phone_number', 'RECORD', - fields=[self._make_one('area_code', 'STRING'), - self._make_one('local_number', 'STRING')]) - self.assertEqual(field.name, 'phone_number') - self.assertEqual(field.field_type, 'RECORD') - self.assertEqual(field.mode, 'NULLABLE') - self.assertIsNone(field.description) - self.assertEqual(len(field.fields), 2) - self.assertEqual(field.fields[0].name, 'area_code') - self.assertEqual(field.fields[0].field_type, 'STRING') - self.assertEqual(field.fields[0].mode, 'NULLABLE') - self.assertIsNone(field.fields[0].description) - self.assertIsNone(field.fields[0].fields) - self.assertEqual(field.fields[1].name, 'local_number') - self.assertEqual(field.fields[1].field_type, 'STRING') - self.assertEqual(field.fields[1].mode, 'NULLABLE') - self.assertIsNone(field.fields[1].description) - self.assertIsNone(field.fields[1].fields) + 'phone_number', + 'RECORD', + fields=[sub_field1, sub_field2], + ) + self.assertEqual(field._name, 'phone_number') + self.assertEqual(field._field_type, 'RECORD') + self.assertEqual(field._mode, 'NULLABLE') + self.assertIsNone(field._description) + self.assertEqual(len(field._fields), 2) + self.assertIs(field._fields[0], sub_field1) + 
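# ---------------------------------------------------------------------------
# Taken together, the tests in this block pin ``SchemaField`` down as an
# immutable value object: state lives in private attributes, is exposed
# through read-only properties, and equality and hashing are value-based, so
# fields can live in sets and serve as dict keys. For example:
#
#     f1 = SchemaField('age', 'INTEGER')
#     f2 = SchemaField('age', 'INTEGER')
#     assert f1 == f2 and len({f1, f2}) == 1
# ---------------------------------------------------------------------------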
self.assertIs(field._fields[1], sub_field2) + + def test_name_property(self): + name = 'lemon-ness' + schema_field = self._make_one(name, 'INTEGER') + self.assertIs(schema_field.name, name) + + def test_field_type_property(self): + field_type = 'BOOLEAN' + schema_field = self._make_one('whether', field_type) + self.assertIs(schema_field.field_type, field_type) + + def test_mode_property(self): + mode = 'REPEATED' + schema_field = self._make_one('again', 'FLOAT', mode=mode) + self.assertIs(schema_field.mode, mode) + + def test_is_nullable(self): + mode = 'NULLABLE' + schema_field = self._make_one('test', 'FLOAT', mode=mode) + self.assertTrue(schema_field.is_nullable) + + def test_is_not_nullable(self): + mode = 'REPEATED' + schema_field = self._make_one('test', 'FLOAT', mode=mode) + self.assertFalse(schema_field.is_nullable) + + def test_description_property(self): + description = 'It holds some data.' + schema_field = self._make_one( + 'do', 'TIMESTAMP', description=description) + self.assertIs(schema_field.description, description) + + def test_fields_property(self): + sub_field1 = self._make_one('one', 'STRING') + sub_field2 = self._make_one('fish', 'INTEGER') + fields = (sub_field1, sub_field2) + schema_field = self._make_one('boat', 'RECORD', fields=fields) + self.assertIs(schema_field.fields, fields) + + def test___eq___wrong_type(self): + field = self._make_one('test', 'STRING') + other = object() + self.assertNotEqual(field, other) + self.assertIs(field.__eq__(other), NotImplemented) def test___eq___name_mismatch(self): field = self._make_one('test', 'STRING') @@ -111,3 +150,46 @@ def test___eq___hit_w_fields(self): field = self._make_one('test', 'RECORD', fields=[sub1, sub2]) other = self._make_one('test', 'RECORD', fields=[sub1, sub2]) self.assertEqual(field, other) + + def test___ne___wrong_type(self): + field = self._make_one('toast', 'INTEGER') + other = object() + self.assertNotEqual(field, other) + self.assertIs(field.__ne__(other), NotImplemented) + + def test___ne___same_value(self): + field1 = self._make_one('test', 'TIMESTAMP', mode='REPEATED') + field2 = self._make_one('test', 'TIMESTAMP', mode='REPEATED') + # unittest ``assertEqual`` uses ``==`` not ``!=``. 
+ comparison_val = (field1 != field2) + self.assertFalse(comparison_val) + + def test___ne___different_values(self): + field1 = self._make_one( + 'test1', 'FLOAT', mode='REPEATED', description='Not same') + field2 = self._make_one( + 'test2', 'FLOAT', mode='NULLABLE', description='Knot saym') + self.assertNotEqual(field1, field2) + + def test___hash__set_equality(self): + sub1 = self._make_one('sub1', 'STRING') + sub2 = self._make_one('sub2', 'STRING') + field1 = self._make_one('test', 'RECORD', fields=[sub1]) + field2 = self._make_one('test', 'RECORD', fields=[sub2]) + set_one = {field1, field2} + set_two = {field1, field2} + self.assertEqual(set_one, set_two) + + def test___hash__not_equals(self): + sub1 = self._make_one('sub1', 'STRING') + sub2 = self._make_one('sub2', 'STRING') + field1 = self._make_one('test', 'RECORD', fields=[sub1]) + field2 = self._make_one('test', 'RECORD', fields=[sub2]) + set_one = {field1} + set_two = {field2} + self.assertNotEqual(set_one, set_two) + + def test___repr__(self): + field1 = self._make_one('field1', 'STRING') + expected = "SchemaField('field1', 'string', 'NULLABLE', None, ())" + self.assertEqual(repr(field1), expected) diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index c940706c6b86..502c0495f9c9 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -12,8 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import email +import io +import json import unittest +import mock +from six.moves import http_client +import pytest + class _SchemaBase(object): @@ -31,7 +38,8 @@ def _verifySchema(self, schema, resource): class TestTable(unittest.TestCase, _SchemaBase): - PROJECT = 'project' + + PROJECT = 'prahj-ekt' DS_NAME = 'dataset-name' TABLE_NAME = 'table-name' @@ -124,8 +132,12 @@ def _verifyResourceProperties(self, table, resource): if 'view' in resource: self.assertEqual(table.view_query, resource['view']['query']) + self.assertEqual( + table.view_use_legacy_sql, + resource['view'].get('useLegacySql')) else: self.assertIsNone(table.view_query) + self.assertIsNone(table.view_use_legacy_sql) if 'schema' in resource: self._verifySchema(table.schema, resource) @@ -160,6 +172,7 @@ def test_ctor(self): self.assertIsNone(table.friendly_name) self.assertIsNone(table.location) self.assertIsNone(table.view_query) + self.assertIsNone(table.view_use_legacy_sql) def test_ctor_w_schema(self): from google.cloud.bigquery.table import SchemaField @@ -358,6 +371,22 @@ def test_view_query_deleter(self): del table.view_query self.assertIsNone(table.view_query) + def test_view_use_legacy_sql_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._make_one(self.TABLE_NAME, dataset) + with self.assertRaises(ValueError): + table.view_use_legacy_sql = 12345 + + def test_view_use_legacy_sql_setter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._make_one(self.TABLE_NAME, dataset) + table.view_use_legacy_sql = False + table.view_query = 'select * from foo' + self.assertEqual(table.view_use_legacy_sql, False) + self.assertEqual(table.view_query, 'select * from foo') + def test_from_api_repr_missing_identity(self): self._setUpConstants() client = _Client(self.PROJECT) @@ -403,7 +432,7 @@ def test_create_new_day_partitioned_table(self): dataset = _Dataset(client) table = self._make_one(self.TABLE_NAME, dataset) table.partitioning_type = 'DAY' - table.create() + 
table.create() self.assertEqual(len(conn._requested), 1) req = conn._requested[0] @@ -978,7 +1007,7 @@ def test_update_w_alternate_client(self): self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) RESOURCE['expirationTime'] = _millis(self.EXP_TIME) - RESOURCE['view'] = {'query': QUERY} + RESOURCE['view'] = {'query': QUERY, 'useLegacySql': True} RESOURCE['type'] = 'VIEW' conn1 = _Connection() client1 = _Client(project=self.PROJECT, connection=conn1) @@ -990,6 +1019,7 @@ def test_update_w_alternate_client(self): table.location = LOCATION table.expires = self.EXP_TIME table.view_query = QUERY + table.view_use_legacy_sql = True table.update(client=client2) @@ -1005,7 +1035,7 @@ def test_update_w_alternate_client(self): 'tableId': self.TABLE_NAME}, 'expirationTime': _millis(self.EXP_TIME), 'location': 'EU', - 'view': {'query': QUERY}, + 'view': {'query': QUERY, 'useLegacySql': True}, } self.assertEqual(req['data'], SENT) self._verifyResourceProperties(table, RESOURCE) @@ -1049,12 +1079,6 @@ def test_fetch_data_wo_schema(self): client = _Client(project=self.PROJECT) dataset = _Dataset(client) table = self._make_one(self.TABLE_NAME, dataset=dataset) - ROWS = [ - ('Phred Phlyntstone', 32), - ('Bharney Rhubble', 33), - ('Wylma Phlyntstone', 29), - ('Bhettye Rhubble', 27), - ] with self.assertRaises(ValueError) as exc: table.fetch_data() @@ -1537,296 +1561,476 @@ def _row_data(row): self.assertEqual(req['path'], '/%s' % PATH) self.assertEqual(req['data'], SENT) - def test_upload_from_file_text_mode_file_failure(self): + @mock.patch('google.auth.transport.requests.AuthorizedSession') + def test__make_transport(self, session_factory): + client = mock.Mock(spec=[u'_credentials']) + table = self._make_one(self.TABLE_NAME, None) + transport = table._make_transport(client) - class TextModeFile(object): - mode = 'r' + self.assertIs(transport, session_factory.return_value) + session_factory.assert_called_once_with(client._credentials) - conn = _Connection() - client = _Client(project=self.PROJECT, connection=conn) + @staticmethod + def _mock_requests_response(status_code, headers, content=b''): + return mock.Mock( + content=content, headers=headers, status_code=status_code, + spec=['content', 'headers', 'status_code']) + + def _mock_transport(self, status_code, headers, content=b''): + fake_transport = mock.Mock(spec=['request']) + fake_response = self._mock_requests_response( + status_code, headers, content=content) + fake_transport.request.return_value = fake_response + return fake_transport + + def _initiate_resumable_upload_helper(self, num_retries=None): + from google.resumable_media.requests import ResumableUpload + from google.cloud.bigquery.table import _DEFAULT_CHUNKSIZE + from google.cloud.bigquery.table import _GENERIC_CONTENT_TYPE + from google.cloud.bigquery.table import _get_upload_headers + from google.cloud.bigquery.table import _get_upload_metadata + + connection = _Connection() + client = _Client(self.PROJECT, connection=connection) dataset = _Dataset(client) - file_obj = TextModeFile() - table = self._make_one(self.TABLE_NAME, dataset=dataset) - with self.assertRaises(ValueError): - table.upload_from_file(file_obj, 'CSV', size=1234) + table = self._make_one(self.TABLE_NAME, dataset) - def test_upload_from_file_binary_mode_no_failure(self): - self._upload_from_file_helper(input_file_mode='r+b') + # Create mocks to be checked for doing transport. 
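+        # (The fake response below mimics the first reply in the resumable
+        # upload protocol, whose ``location`` header carries the session URL.)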
+ resumable_url = 'http://test.invalid?upload_id=hey-you' + response_headers = {'location': resumable_url} + fake_transport = self._mock_transport( + http_client.OK, response_headers) + table._make_transport = mock.Mock( + return_value=fake_transport, spec=[]) + + # Create some mock arguments and call the method under test. + data = b'goodbye gudbi gootbee' + stream = io.BytesIO(data) + metadata = _get_upload_metadata( + 'CSV', table._schema, table._dataset, table.name) + upload, transport = table._initiate_resumable_upload( + client, stream, metadata, num_retries) + + # Check the returned values. + self.assertIsInstance(upload, ResumableUpload) + upload_url = ( + 'https://www.googleapis.com/upload/bigquery/v2/projects/' + + self.PROJECT + + '/jobs?uploadType=resumable') + self.assertEqual(upload.upload_url, upload_url) + expected_headers = _get_upload_headers(connection.USER_AGENT) + self.assertEqual(upload._headers, expected_headers) + self.assertFalse(upload.finished) + self.assertEqual(upload._chunk_size, _DEFAULT_CHUNKSIZE) + self.assertIs(upload._stream, stream) + self.assertIsNone(upload._total_bytes) + self.assertEqual(upload._content_type, _GENERIC_CONTENT_TYPE) + self.assertEqual(upload.resumable_url, resumable_url) + + retry_strategy = upload._retry_strategy + self.assertEqual(retry_strategy.max_sleep, 64.0) + if num_retries is None: + self.assertEqual(retry_strategy.max_cumulative_retry, 600.0) + self.assertIsNone(retry_strategy.max_retries) + else: + self.assertIsNone(retry_strategy.max_cumulative_retry) + self.assertEqual(retry_strategy.max_retries, num_retries) + self.assertIs(transport, fake_transport) + # Make sure we never read from the stream. + self.assertEqual(stream.tell(), 0) + + # Check the mocks. + table._make_transport.assert_called_once_with(client) + request_headers = expected_headers.copy() + request_headers['x-upload-content-type'] = _GENERIC_CONTENT_TYPE + fake_transport.request.assert_called_once_with( + 'POST', + upload_url, + data=json.dumps(metadata).encode('utf-8'), + headers=request_headers, + ) - def test_upload_from_file_size_failure(self): - conn = _Connection() - client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - file_obj = object() - table = self._make_one(self.TABLE_NAME, dataset=dataset) - with self.assertRaises(ValueError): - table.upload_from_file(file_obj, 'CSV', size=None) + def test__initiate_resumable_upload(self): + self._initiate_resumable_upload_helper() - def test_upload_from_file_multipart_w_400(self): - import csv - import datetime - from six.moves.http_client import BAD_REQUEST - from google.cloud._testing import _NamedTemporaryFile - from google.cloud._helpers import UTC - from google.cloud.exceptions import BadRequest + def test__initiate_resumable_upload_with_retry(self): + self._initiate_resumable_upload_helper(num_retries=11) - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - response = {'status': BAD_REQUEST} - conn = _Connection( - (response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) + def _do_multipart_upload_success_helper( + self, get_boundary, num_retries=None): + from google.cloud.bigquery.table import _get_upload_headers + from google.cloud.bigquery.table import _get_upload_metadata + + connection = _Connection() + client = _Client(self.PROJECT, connection=connection) dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + table = self._make_one(self.TABLE_NAME, dataset) - 
with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as file_obj: - writer = csv.writer(file_obj) - writer.writerow(('full_name', 'age', 'joined')) - writer.writerow(('Phred Phlyntstone', 32, WHEN)) + # Create mocks to be checked for doing transport. + fake_transport = self._mock_transport(http_client.OK, {}) + table._make_transport = mock.Mock(return_value=fake_transport, spec=[]) + + # Create some mock arguments. + data = b'Bzzzz-zap \x00\x01\xf4' + stream = io.BytesIO(data) + metadata = _get_upload_metadata( + 'CSV', table._schema, table._dataset, table.name) + size = len(data) + response = table._do_multipart_upload( + client, stream, metadata, size, num_retries) + + # Check the mocks and the returned value. + self.assertIs(response, fake_transport.request.return_value) + self.assertEqual(stream.tell(), size) + table._make_transport.assert_called_once_with(client) + get_boundary.assert_called_once_with() + + upload_url = ( + 'https://www.googleapis.com/upload/bigquery/v2/projects/' + + self.PROJECT + + '/jobs?uploadType=multipart') + payload = ( + b'--==0==\r\n' + + b'content-type: application/json; charset=UTF-8\r\n\r\n' + + json.dumps(metadata).encode('utf-8') + b'\r\n' + + b'--==0==\r\n' + + b'content-type: */*\r\n\r\n' + + data + b'\r\n' + + b'--==0==--') + headers = _get_upload_headers(connection.USER_AGENT) + headers['content-type'] = b'multipart/related; boundary="==0=="' + fake_transport.request.assert_called_once_with( + 'POST', + upload_url, + data=payload, + headers=headers, + ) - with open(temp.name, 'rb') as file_obj: - with self.assertRaises(BadRequest): - table.upload_from_file( - file_obj, 'CSV', rewind=True) + @mock.patch(u'google.resumable_media._upload.get_boundary', + return_value=b'==0==') + def test__do_multipart_upload(self, get_boundary): + self._do_multipart_upload_success_helper(get_boundary) - def _upload_from_file_helper(self, **kw): - import csv - import datetime - from six.moves.http_client import OK - from google.cloud._helpers import UTC - from google.cloud._testing import _NamedTemporaryFile - from google.cloud.bigquery.table import SchemaField + @mock.patch(u'google.resumable_media._upload.get_boundary', + return_value=b'==0==') + def test__do_multipart_upload_with_retry(self, get_boundary): + self._do_multipart_upload_success_helper(get_boundary, num_retries=8) - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - PATH = 'projects/%s/jobs' % (self.PROJECT,) - response = {'status': OK} - conn = _Connection( - (response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) - expected_job = object() - if 'client' in kw: - kw['client']._job = expected_job - else: - client._job = expected_job - input_file_mode = kw.pop('input_file_mode', 'rb') - dataset = _Dataset(client) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') - joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, joined]) - ROWS = [ - ('Phred Phlyntstone', 32, WHEN), - ('Bharney Rhubble', 33, WHEN + datetime.timedelta(seconds=1)), - ('Wylma Phlyntstone', 29, WHEN + datetime.timedelta(seconds=2)), - ('Bhettye Rhubble', 27, None), - ] - with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as file_obj: - writer = csv.writer(file_obj) - writer.writerow(('full_name', 'age', 'joined')) - writer.writerows(ROWS) - - with open(temp.name, input_file_mode) 
as file_obj: - BODY = file_obj.read() - explicit_size = kw.pop('_explicit_size', False) - if explicit_size: - kw['size'] = len(BODY) - job = table.upload_from_file( - file_obj, 'CSV', rewind=True, **kw) - - self.assertIs(job, expected_job) - return conn.http._requested, PATH, BODY - - def test_upload_from_file_w_bound_client_multipart(self): - import json - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - from google.cloud._helpers import _to_bytes - - requested, PATH, BODY = self._upload_from_file_helper() - parse_chunk = _email_chunk_parser() - - self.assertEqual(len(requested), 1) - req = requested[0] - self.assertEqual(req['method'], 'POST') - uri = req['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'example.com') - self.assertEqual(path, '/%s' % PATH) - self.assertEqual(dict(parse_qsl(qs)), - {'uploadType': 'multipart'}) - - ctype, boundary = [x.strip() - for x in req['headers']['content-type'].split(';')] - self.assertEqual(ctype, 'multipart/related') - self.assertTrue(boundary.startswith('boundary="==')) - self.assertTrue(boundary.endswith('=="')) - - divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) - chunks = req['body'].split(divider)[1:-1] # discard prolog / epilog - self.assertEqual(len(chunks), 2) - - text_msg = parse_chunk(chunks[0].strip()) - self.assertEqual(dict(text_msg._headers), - {'Content-Type': 'application/json', - 'MIME-Version': '1.0'}) - metadata = json.loads(text_msg._payload) - load_config = metadata['configuration']['load'] - DESTINATION_TABLE = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_NAME, - 'tableId': self.TABLE_NAME, - } - self.assertEqual(load_config['destinationTable'], DESTINATION_TABLE) - self.assertEqual(load_config['sourceFormat'], 'CSV') - - app_msg = parse_chunk(chunks[1].strip()) - self.assertEqual(dict(app_msg._headers), - {'Content-Type': 'application/octet-stream', - 'Content-Transfer-Encoding': 'binary', - 'MIME-Version': '1.0'}) - body = BODY.decode('ascii').rstrip() - body_lines = [line.strip() for line in body.splitlines()] - payload_lines = app_msg._payload.rstrip().splitlines() - self.assertEqual(payload_lines, body_lines) - - def test_upload_from_file_resumable_with_400(self): - import csv - import datetime - import mock - from six.moves.http_client import BAD_REQUEST - from google.cloud.exceptions import BadRequest - from google.cloud._helpers import UTC - from google.cloud._testing import _NamedTemporaryFile +class TestTableUpload(object): + # NOTE: This is a "partner" to `TestTable` meant to test some of the + # "upload" portions of `Table`. It also uses `pytest`-style tests + # rather than `unittest`-style. 
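+    #
+    # Each test below follows the same shape: build a ``Table`` with mocked
+    # client plumbing, patch one low-level upload helper, call the public
+    # API, then assert on the patched mock. A minimal sketch of that shape
+    # (``test_sketch`` is a hypothetical name, not part of this change):
+    #
+    #     def test_sketch(self):
+    #         table = self._make_table()
+    #         patch = self._make_do_upload_patch(
+    #             table, '_do_resumable_upload')
+    #         with patch as do_upload:
+    #             table.upload_from_file(
+    #                 self._make_file_obj(), source_format='CSV')
+    #         assert do_upload.called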
- WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - initial_response = {'status': BAD_REQUEST} - conn = _Connection( - (initial_response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) + @staticmethod + def _make_table(): + from google.cloud.bigquery import _http + from google.cloud.bigquery import client + from google.cloud.bigquery import dataset + from google.cloud.bigquery import table - class _UploadConfig(object): - accept = ['*/*'] - max_size = None - resumable_multipart = True - resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' - simple_multipart = True - simple_path = u'' # force resumable - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + connection = mock.create_autospec(_http.Connection, instance=True) + client = mock.create_autospec(client.Client, instance=True) + client._connection = connection + client._credentials = mock.sentinel.credentials + client.project = 'project_id' - with mock.patch('google.cloud.bigquery.table._UploadConfig', - new=_UploadConfig): - with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as file_obj: - writer = csv.writer(file_obj) - writer.writerow(('full_name', 'age', 'joined')) - writer.writerow(('Phred Phlyntstone', 32, WHEN)) - - with open(temp.name, 'rb') as file_obj: - with self.assertRaises(BadRequest): - table.upload_from_file( - file_obj, 'CSV', rewind=True) - - # pylint: disable=too-many-statements - def test_upload_from_file_w_explicit_client_resumable(self): - import json - import mock - from six.moves.http_client import OK - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - UPLOAD_PATH = 'https://example.com/upload/test' - initial_response = {'status': OK, 'location': UPLOAD_PATH} - upload_response = {'status': OK} - conn = _Connection( - (initial_response, b'{}'), - (upload_response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) + dataset = dataset.Dataset('test_dataset', client) + table = table.Table('test_table', dataset) - class _UploadConfig(object): - accept = ['*/*'] - max_size = None - resumable_multipart = True - resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' - simple_multipart = True - simple_path = u'' # force resumable - - with mock.patch('google.cloud.bigquery.table._UploadConfig', - new=_UploadConfig): - orig_requested, PATH, BODY = self._upload_from_file_helper( - allow_jagged_rows=False, - allow_quoted_newlines=False, - create_disposition='CREATE_IF_NEEDED', - encoding='utf8', - field_delimiter=',', - ignore_unknown_values=False, - max_bad_records=0, - quote_character='"', - skip_leading_rows=1, - write_disposition='WRITE_APPEND', - client=client, - _explicit_size=True) - - self.assertEqual(len(orig_requested), 0) - - requested = conn.http._requested - self.assertEqual(len(requested), 2) - req = requested[0] - self.assertEqual(req['method'], 'POST') - uri = req['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'example.com') - self.assertEqual(path, '/%s' % PATH) - self.assertEqual(dict(parse_qsl(qs)), - {'uploadType': 'resumable'}) - - self.assertEqual(req['headers']['content-type'], 'application/json') - metadata = json.loads(req['body']) - load_config = metadata['configuration']['load'] - DESTINATION_TABLE = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_NAME, - 'tableId': self.TABLE_NAME, + return table + + @staticmethod + 
def _make_response(status_code, content='', headers={}): + """Make a mock HTTP response.""" + import requests + response = mock.create_autospec(requests.Response, instance=True) + response.content = content.encode('utf-8') + response.headers = headers + response.status_code = status_code + return response + + @classmethod + def _make_do_upload_patch(cls, table, method, side_effect=None): + """Patches the low-level upload helpers.""" + if side_effect is None: + side_effect = [cls._make_response( + http_client.OK, + json.dumps({}), + {'Content-Type': 'application/json'})] + return mock.patch.object( + table, method, side_effect=side_effect, autospec=True) + + EXPECTED_CONFIGURATION = { + 'configuration': { + 'load': { + 'sourceFormat': 'CSV', + 'destinationTable': { + 'projectId': 'project_id', + 'datasetId': 'test_dataset', + 'tableId': 'test_table' + } + } } - self.assertEqual(load_config['destinationTable'], DESTINATION_TABLE) - self.assertEqual(load_config['sourceFormat'], 'CSV') - self.assertEqual(load_config['allowJaggedRows'], False) - self.assertEqual(load_config['allowQuotedNewlines'], False) - self.assertEqual(load_config['createDisposition'], 'CREATE_IF_NEEDED') - self.assertEqual(load_config['encoding'], 'utf8') - self.assertEqual(load_config['fieldDelimiter'], ',') - self.assertEqual(load_config['ignoreUnknownValues'], False) - self.assertEqual(load_config['maxBadRecords'], 0) - self.assertEqual(load_config['quote'], '"') - self.assertEqual(load_config['skipLeadingRows'], 1) - self.assertEqual(load_config['writeDisposition'], 'WRITE_APPEND') - - req = requested[1] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['uri'], UPLOAD_PATH) - headers = req['headers'] - length = len(BODY) - self.assertEqual(headers['Content-Type'], 'application/octet-stream') - self.assertEqual(headers['Content-Range'], - 'bytes 0-%d/%d' % (length - 1, length)) - self.assertEqual(headers['content-length'], '%d' % (length,)) - self.assertEqual(req['body'], BODY) - # pylint: enable=too-many-statements + } + + @staticmethod + def _make_file_obj(): + return io.BytesIO(b'hello, is it me you\'re looking for?') + + # High-level tests + + def test_upload_from_file_resumable(self): + import google.cloud.bigquery.table + + table = self._make_table() + file_obj = self._make_file_obj() + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload') + with do_upload_patch as do_upload: + table.upload_from_file(file_obj, source_format='CSV') + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + self.EXPECTED_CONFIGURATION, + google.cloud.bigquery.table._DEFAULT_NUM_RETRIES) + + def test_upload_file_resumable_metadata(self): + table = self._make_table() + file_obj = self._make_file_obj() + + config_args = { + 'source_format': 'CSV', + 'allow_jagged_rows': False, + 'allow_quoted_newlines': False, + 'create_disposition': 'CREATE_IF_NEEDED', + 'encoding': 'utf8', + 'field_delimiter': ',', + 'ignore_unknown_values': False, + 'max_bad_records': 0, + 'quote_character': '"', + 'skip_leading_rows': 1, + 'write_disposition': 'WRITE_APPEND', + 'job_name': 'oddjob' + } + + expected_config = { + 'configuration': { + 'load': { + 'sourceFormat': config_args['source_format'], + 'destinationTable': { + 'projectId': table._dataset._client.project, + 'datasetId': table.dataset_name, + 'tableId': table.name + }, + 'allowJaggedRows': config_args['allow_jagged_rows'], + 'allowQuotedNewlines': + config_args['allow_quoted_newlines'], + 'createDisposition': 
config_args['create_disposition'], + 'encoding': config_args['encoding'], + 'fieldDelimiter': config_args['field_delimiter'], + 'ignoreUnknownValues': + config_args['ignore_unknown_values'], + 'maxBadRecords': config_args['max_bad_records'], + 'quote': config_args['quote_character'], + 'skipLeadingRows': config_args['skip_leading_rows'], + 'writeDisposition': config_args['write_disposition'], + 'jobReference': {'jobId': config_args['job_name']} + } + } + } + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload') + with do_upload_patch as do_upload: + table.upload_from_file( + file_obj, **config_args) + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + expected_config, + mock.ANY) + + def test_upload_from_file_multipart(self): + import google.cloud.bigquery.table + + table = self._make_table() + file_obj = self._make_file_obj() + file_obj_size = 10 + + do_upload_patch = self._make_do_upload_patch( + table, '_do_multipart_upload') + with do_upload_patch as do_upload: + table.upload_from_file( + file_obj, source_format='CSV', size=file_obj_size) + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + self.EXPECTED_CONFIGURATION, + file_obj_size, + google.cloud.bigquery.table._DEFAULT_NUM_RETRIES) + + def test_upload_from_file_with_retries(self): + table = self._make_table() + file_obj = self._make_file_obj() + num_retries = 20 + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload') + with do_upload_patch as do_upload: + table.upload_from_file( + file_obj, source_format='CSV', num_retries=num_retries) + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + self.EXPECTED_CONFIGURATION, + num_retries) + + def test_upload_from_file_with_rewind(self): + table = self._make_table() + file_obj = self._make_file_obj() + file_obj.seek(2) + + with self._make_do_upload_patch(table, '_do_resumable_upload'): + table.upload_from_file( + file_obj, source_format='CSV', rewind=True) + + assert file_obj.tell() == 0 + + def test_upload_from_file_failure(self): + from google.resumable_media import InvalidResponse + from google.cloud import exceptions + + table = self._make_table() + file_obj = self._make_file_obj() + + response = self._make_response( + content='Someone is already in this spot.', + status_code=http_client.CONFLICT) + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload', + side_effect=InvalidResponse(response)) + + with do_upload_patch, pytest.raises(exceptions.Conflict) as exc_info: + table.upload_from_file( + file_obj, source_format='CSV', rewind=True) + + assert exc_info.value.message == response.content.decode('utf-8') + assert exc_info.value.errors == [] + + def test_upload_from_file_bad_mode(self): + table = self._make_table() + file_obj = mock.Mock(spec=['mode']) + file_obj.mode = 'x' + + with pytest.raises(ValueError): + table.upload_from_file( + file_obj, source_format='CSV',) + + # Low-level tests + + @classmethod + def _make_resumable_upload_responses(cls, size): + """Make a series of responses for a successful resumable upload.""" + from google import resumable_media + + resumable_url = 'http://test.invalid?upload_id=and-then-there-was-1' + initial_response = cls._make_response( + http_client.OK, '', {'location': resumable_url}) + data_response = cls._make_response( + resumable_media.PERMANENT_REDIRECT, + '', {'range': 'bytes=0-{:d}'.format(size - 1)}) + final_response = cls._make_response( + http_client.OK, + json.dumps({'size': 
size}),
+            {'Content-Type': 'application/json'})
+        return [initial_response, data_response, final_response]
+
+    @staticmethod
+    def _make_transport_patch(table, responses=None):
+        """Patch a table's _make_transport method to return given responses."""
+        import google.auth.transport.requests
+
+        transport = mock.create_autospec(
+            google.auth.transport.requests.AuthorizedSession, instance=True)
+        transport.request.side_effect = responses
+        return mock.patch.object(
+            table, '_make_transport', return_value=transport, autospec=True)
+
+    def test__do_resumable_upload(self):
+        table = self._make_table()
+        file_obj = self._make_file_obj()
+        file_obj_len = len(file_obj.getvalue())
+        responses = self._make_resumable_upload_responses(file_obj_len)
+
+        with self._make_transport_patch(table, responses) as transport:
+            result = table._do_resumable_upload(
+                table._dataset._client,
+                file_obj,
+                self.EXPECTED_CONFIGURATION,
+                None)
+
+        content = result.content.decode('utf-8')
+        assert json.loads(content) == {'size': file_obj_len}
+
+        # Verify that configuration data was passed in with the initial
+        # request.
+        transport.return_value.request.assert_any_call(
+            'POST',
+            mock.ANY,
+            data=json.dumps(self.EXPECTED_CONFIGURATION).encode('utf-8'),
+            headers=mock.ANY)
+
+    def test__do_multipart_upload(self):
+        table = self._make_table()
+        file_obj = self._make_file_obj()
+        file_obj_len = len(file_obj.getvalue())
+        responses = [self._make_response(http_client.OK)]
+
+        with self._make_transport_patch(table, responses) as transport:
+            table._do_multipart_upload(
+                table._dataset._client,
+                file_obj,
+                self.EXPECTED_CONFIGURATION,
+                file_obj_len,
+                None)
+
+        # Verify that configuration data was passed in with the initial
+        # request.
+        request_args = transport.return_value.request.mock_calls[0][2]
+        request_data = request_args['data'].decode('utf-8')
+        request_headers = request_args['headers']
+
+        request_content = email.message_from_string(
+            'Content-Type: {}\r\n{}'.format(
+                request_headers['content-type'].decode('utf-8'),
+                request_data))
+
+        # There should be two payloads: the configuration and the binary data.
+        configuration_data = request_content.get_payload(0).get_payload()
+        binary_data = request_content.get_payload(1).get_payload()
+
+        assert json.loads(configuration_data) == self.EXPECTED_CONFIGURATION
+        assert binary_data.encode('utf-8') == file_obj.getvalue()
+
+    def test__do_multipart_upload_wrong_size(self):
+        table = self._make_table()
+        file_obj = self._make_file_obj()
+        file_obj_len = len(file_obj.getvalue())
+
+        with pytest.raises(ValueError):
+            table._do_multipart_upload(
+                table._dataset._client,
+                file_obj,
+                {},
+                file_obj_len + 1,
+                None)


 class Test_parse_schema_resource(unittest.TestCase, _SchemaBase):
@@ -1942,6 +2146,70 @@ def test_w_subfields(self):
                          'mode': 'REQUIRED'}]})


+class Test__get_upload_metadata(unittest.TestCase):
+
+    @staticmethod
+    def _call_fut(source_format, schema, dataset, name):
+        from google.cloud.bigquery.table import _get_upload_metadata
+
+        return _get_upload_metadata(source_format, schema, dataset, name)
+
+    def test_empty_schema(self):
+        source_format = 'AVRO'
+        dataset = mock.Mock(project='prediction', spec=['name', 'project'])
+        dataset.name = 'market'  # mock.Mock() treats `name` specially.
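+        # (``Mock(name=...)`` would name the mock itself rather than set an
+        # attribute, so ``name`` must be assigned after construction.)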
+ table_name = 'chairs' + metadata = self._call_fut(source_format, [], dataset, table_name) + + expected = { + 'configuration': { + 'load': { + 'sourceFormat': source_format, + 'destinationTable': { + 'projectId': dataset.project, + 'datasetId': dataset.name, + 'tableId': table_name, + }, + }, + }, + } + self.assertEqual(metadata, expected) + + def test_with_schema(self): + from google.cloud.bigquery.table import SchemaField + + source_format = 'CSV' + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + dataset = mock.Mock(project='blind', spec=['name', 'project']) + dataset.name = 'movie' # mock.Mock() treats `name` specially. + table_name = 'teebull-neem' + metadata = self._call_fut( + source_format, [full_name], dataset, table_name) + + expected = { + 'configuration': { + 'load': { + 'sourceFormat': source_format, + 'destinationTable': { + 'projectId': dataset.project, + 'datasetId': dataset.name, + 'tableId': table_name, + }, + 'schema': { + 'fields': [ + { + 'name': full_name.name, + 'type': full_name.field_type, + 'mode': full_name.mode, + }, + ], + }, + }, + }, + } + self.assertEqual(metadata, expected) + + class _Client(object): _query_results = () @@ -1950,9 +2218,6 @@ def __init__(self, project='project', connection=None): self.project = project self._connection = connection - def job_from_resource(self, resource): # pylint: disable=unused-argument - return self._job - def run_sync_query(self, query): return _Query(query, self) @@ -1984,37 +2249,14 @@ def project(self): return self._client.project -class _Responder(object): - - def __init__(self, *responses): - self._responses = responses[:] - self._requested = [] - - def _respond(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - -class _HTTP(_Responder): - - connections = {} # For google-apitools debugging. - - def request(self, uri, method, headers, body, **kw): - if hasattr(body, 'read'): - body = body.read() - return self._respond(uri=uri, method=method, headers=headers, - body=body, **kw) - - -class _Connection(_Responder): +class _Connection(object): API_BASE_URL = 'http://example.com' USER_AGENT = 'testing 1.2.3' def __init__(self, *responses): - super(_Connection, self).__init__(*responses) - self.http = _HTTP(*responses) + self._responses = responses[:] + self._requested = [] def api_request(self, **kw): from google.cloud.exceptions import NotFound @@ -2027,29 +2269,3 @@ def api_request(self, **kw): raise NotFound('miss') else: return response - - def build_api_url(self, path, query_params=None, - api_base_url=API_BASE_URL): - from six.moves.urllib.parse import urlencode - from six.moves.urllib.parse import urlsplit - from six.moves.urllib.parse import urlunsplit - - # Mimic the build_api_url interface. 
- qs = urlencode(query_params or {}) - scheme, netloc, _, _, _ = urlsplit(api_base_url) - return urlunsplit((scheme, netloc, path, qs, '')) - - -def _email_chunk_parser(): - import six - - if six.PY3: # pragma: NO COVER Python3 - from email.parser import BytesParser - - parser = BytesParser() - return parser.parsebytes - else: - from email.parser import Parser - - parser = Parser() - return parser.parsestr diff --git a/bigtable/MANIFEST.in b/bigtable/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/bigtable/MANIFEST.in +++ b/bigtable/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/bigtable/README.rst b/bigtable/README.rst index 3b37f5ec6880..ebc202d8d87e 100644 --- a/bigtable/README.rst +++ b/bigtable/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Bigtable - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigtable-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigtable/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -42,6 +42,6 @@ See the ``google-cloud-python`` API Bigtable `Documentation`_ to learn how to manage your data in Bigtable tables. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigtable.svg - :target: https://pypi.python.org/pypi/google-cloud-bigtable + :target: https://pypi.org/project/google-cloud-bigtable/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigtable.svg - :target: https://pypi.python.org/pypi/google-cloud-bigtable + :target: https://pypi.org/project/google-cloud-bigtable/ diff --git a/bigtable/google/cloud/bigtable/client.py b/bigtable/google/cloud/bigtable/client.py index 86ee7173c917..de6d0768266f 100644 --- a/bigtable/google/cloud/bigtable/client.py +++ b/bigtable/google/cloud/bigtable/client.py @@ -31,16 +31,13 @@ import os -import google.auth.credentials from google.gax.utils import metrics from google.longrunning import operations_grpc from google.cloud._helpers import make_insecure_stub from google.cloud._helpers import make_secure_stub from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.client import _ClientFactoryMixin -from google.cloud.client import _ClientProjectMixin -from google.cloud.credentials import get_credentials +from google.cloud.client import ClientWithProject from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.cloud.bigtable import __version__ @@ -166,13 +163,13 @@ def _make_table_stub(client): client.emulator_host) -class Client(_ClientFactoryMixin, _ClientProjectMixin): +class Client(ClientWithProject): """Client for interacting with Google Cloud Bigtable API. .. note:: Since the Cloud Bigtable API requires the gRPC transport, no - ``http`` argument is accepted by this class. + ``_http`` argument is accepted by this class. 
:type project: :class:`str` or :func:`unicode <unicode>`
    :param project: (Optional) The ID of the project which owns the
@@ -209,31 +206,21 @@ class Client(_ClientFactoryMixin, _ClientProjectMixin):
     def __init__(self, project=None, credentials=None,
                  read_only=False, admin=False,
                  user_agent=DEFAULT_USER_AGENT):
-        _ClientProjectMixin.__init__(self, project=project)
-        if credentials is None:
-            credentials = get_credentials()
-
         if read_only and admin:
             raise ValueError('A read-only client cannot also perform '
                              'administrative actions.')

-        scopes = []
-        if read_only:
-            scopes.append(READ_ONLY_SCOPE)
-        else:
-            scopes.append(DATA_SCOPE)
-
+        # NOTE: We set the scopes **before** calling the parent constructor.
+        #       It **may** use those scopes in ``with_scopes_if_required``.
         self._read_only = bool(read_only)
-
-        if admin:
-            scopes.append(ADMIN_SCOPE)
-
         self._admin = bool(admin)
+        self.SCOPE = self._get_scopes()

-        credentials = google.auth.credentials.with_scopes_if_required(
-            credentials, scopes)
-
-        self._credentials = credentials
+        # NOTE: This API has no use for the _http argument, but sending it
+        #       will have no impact since the _http() @property only lazily
+        #       creates a working HTTP object.
+        super(Client, self).__init__(
+            project=project, credentials=credentials, _http=None)
         self.user_agent = user_agent
         self.emulator_host = os.getenv(BIGTABLE_EMULATOR)
@@ -244,6 +231,22 @@ def __init__(self, project=None, credentials=None,
         self._operations_stub_internal = _make_operations_stub(self)
         self._table_stub_internal = _make_table_stub(self)

+    def _get_scopes(self):
+        """Get the scopes corresponding to admin / read-only state.
+
+        Returns:
+            Tuple[str, ...]: The tuple of scopes.
+        """
+        if self._read_only:
+            scopes = (READ_ONLY_SCOPE,)
+        else:
+            scopes = (DATA_SCOPE,)
+
+        if self._admin:
+            scopes += (ADMIN_SCOPE,)
+
+        return scopes
+
     def copy(self):
         """Make a copy of this client.
diff --git a/bigtable/google/cloud/bigtable/cluster.py b/bigtable/google/cloud/bigtable/cluster.py
index 80b9068958db..8d15547efae3 100644
--- a/bigtable/google/cloud/bigtable/cluster.py
+++ b/bigtable/google/cloud/bigtable/cluster.py
@@ -21,9 +21,7 @@
     instance_pb2 as data_v2_pb2)
 from google.cloud.bigtable._generated import (
     bigtable_instance_admin_pb2 as messages_v2_pb2)
-from google.cloud.operation import Operation
-from google.cloud.operation import register_type
-
+from google.cloud.future import operation

 _CLUSTER_NAME_RE = re.compile(r'^projects/(?P<project>[^/]+)/'
                               r'instances/(?P<instance>[^/]+)/clusters/'
                               r'(?P<cluster_id>[a-z][-a-z0-9]*)$')
@@ -33,9 +31,6 @@
 """Default number of nodes to use when creating a cluster."""

-register_type(messages_v2_pb2.UpdateClusterMetadata)
-
-
 def _prepare_create_request(cluster):
     """Creates a protobuf request for a CreateCluster request.
@@ -49,6 +44,7 @@ def _prepare_create_request(cluster):
         parent=cluster._instance.name,
         cluster_id=cluster.cluster_id,
         cluster=data_v2_pb2.Cluster(
+            location=cluster.location,
             serve_nodes=cluster.serve_nodes,
         ),
     )
@@ -207,15 +203,18 @@ def create(self):
         :returns: The long-running operation corresponding to the
                   create operation.
         """
-        request_pb = _prepare_create_request(self)
-        # We expect a `google.longrunning.operations_pb2.Operation`.
         client = self._instance._client
+
+        # We expect a `google.longrunning.operations_pb2.Operation`.
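+        # (The stub call below returns that raw protobuf; it is then
+        # wrapped in an operation future via ``operation.from_grpc``.)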
+        request_pb = _prepare_create_request(self)
         operation_pb = client._instance_stub.CreateCluster(request_pb)

-        operation = Operation.from_pb(operation_pb, client)
-        operation.target = self
-        operation.caller_metadata['request_type'] = 'CreateCluster'
-        return operation
+        operation_future = operation.from_grpc(
+            operation_pb,
+            client._operations_stub,
+            data_v2_pb2.Cluster,
+            metadata_type=messages_v2_pb2.UpdateClusterMetadata)
+        return operation_future

     def update(self):
         """Update this cluster.
@@ -235,18 +234,21 @@ def update(self):
         :returns: The long-running operation corresponding to the
                   update operation.
         """
+        client = self._instance._client
+
+        # We expect a `google.longrunning.operations_pb2.Operation`.
         request_pb = data_v2_pb2.Cluster(
             name=self.name,
             serve_nodes=self.serve_nodes,
         )
-        # We expect a `google.longrunning.operations_pb2.Operation`.
-        client = self._instance._client
         operation_pb = client._instance_stub.UpdateCluster(request_pb)

-        operation = Operation.from_pb(operation_pb, client)
-        operation.target = self
-        operation.caller_metadata['request_type'] = 'UpdateCluster'
-        return operation
+        operation_future = operation.from_grpc(
+            operation_pb,
+            client._operations_stub,
+            data_v2_pb2.Cluster,
+            metadata_type=messages_v2_pb2.UpdateClusterMetadata)
+        return operation_future

     def delete(self):
         """Delete this cluster.
diff --git a/bigtable/google/cloud/bigtable/instance.py b/bigtable/google/cloud/bigtable/instance.py
index 1de3cbcea814..958f16602953 100644
--- a/bigtable/google/cloud/bigtable/instance.py
+++ b/bigtable/google/cloud/bigtable/instance.py
@@ -26,8 +26,7 @@
 from google.cloud.bigtable.cluster import Cluster
 from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES
 from google.cloud.bigtable.table import Table
-from google.cloud.operation import Operation
-from google.cloud.operation import register_type
+from google.cloud.future import operation

 _EXISTING_INSTANCE_LOCATION_ID = 'see-existing-cluster'
@@ -35,10 +34,6 @@
                                r'instances/(?P<instance_id>[a-z][-a-z0-9]*)$')

-register_type(messages_v2_pb2.CreateInstanceMetadata)
-register_type(data_v2_pb2.Instance)
-
-
 def _prepare_create_request(instance):
     """Creates a protobuf request for a CreateInstance request.
@@ -232,10 +227,12 @@ def create(self):

         # We expect a `google.longrunning.operations_pb2.Operation`.
         operation_pb = self._client._instance_stub.CreateInstance(request_pb)

-        operation = Operation.from_pb(operation_pb, self._client)
-        operation.target = self
-        operation.caller_metadata['request_type'] = 'CreateInstance'
-        return operation
+        operation_future = operation.from_grpc(
+            operation_pb,
+            self._client._operations_stub,
+            data_v2_pb2.Instance,
+            metadata_type=messages_v2_pb2.CreateInstanceMetadata)
+        return operation_future

     def update(self):
         """Update this instance.
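Both `create()` and `update()` on clusters and instances now return a `google.cloud.future.operation` future built by `operation.from_grpc()`, so callers block on `result()` instead of polling the old `google.cloud.operation.Operation`. A minimal sketch of the consuming side (the project, instance ID, location, and timeout below are placeholders, not part of this change):

    from google.cloud import bigtable

    client = bigtable.Client(project='my-project', admin=True)
    instance = client.instance('my-instance', 'us-central1-c')

    # create() returns an operation future; result() blocks until the
    # backend finishes and unpacks the Instance protobuf, or raises if
    # the wait times out.
    operation_future = instance.create()
    created = operation_future.result(timeout=60)

This mirrors the system-test change below, where `created_op.result(timeout=10)` replaces the old `_wait_until_complete` polling helper.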
diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py
index 8dbf8c1ce6fb..40ef3a2ca2fb 100644
--- a/bigtable/google/cloud/bigtable/table.py
+++ b/bigtable/google/cloud/bigtable/table.py
@@ -181,7 +181,7 @@ def create(self, initial_split_keys=None, column_families=()):
         table_pb = table_v2_pb2.Table()
         for col_fam in column_families:
             curr_id = col_fam.column_family_id
-            table_pb.column_families[curr_id].MergeFrom(col_fam.to_pb())
+            table_pb.column_families[curr_id].CopyFrom(col_fam.to_pb())

         request_pb = table_admin_messages_v2_pb2.CreateTableRequest(
             initial_splits=initial_split_keys or [],
diff --git a/bigtable/nox.py b/bigtable/nox.py
index bc60a19c8217..83b56e49d2df 100644
--- a/bigtable/nox.py
+++ b/bigtable/nox.py
@@ -29,15 +29,25 @@ def unit_tests(session, python_version):
     # Run unit tests against all supported versions of Python.
     session.interpreter = 'python{}'.format(python_version)

+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'unit-' + python_version
+
     # Install all test dependencies, then install this package in-place.
     session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)
     session.install('-e', '.')

     # Run py.test against the unit tests.
-    session.run('py.test', '--quiet',
-        '--cov=google.cloud.bigtable', '--cov=tests.unit', '--cov-append',
-        '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97',
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google.cloud.bigtable',
+        '--cov=tests.unit',
+        '--cov-append',
+        '--cov-config=.coveragerc',
+        '--cov-report=',
+        '--cov-fail-under=97',
         'tests/unit',
+        *session.posargs
     )

@@ -48,11 +58,14 @@ def system_tests(session, python_version):

     # Sanity check: Only run system tests if the environment variable is set.
     if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
-        return
+        session.skip('Credentials must be set via environment variable.')

     # Run the system tests against latest Python 2 and Python 3 only.
     session.interpreter = 'python{}'.format(python_version)

+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'sys-' + python_version
+
     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
     session.install('mock', 'pytest', *LOCAL_DEPS)
@@ -65,21 +78,32 @@ def system_tests(session, python_version):

 @nox.session
 def lint(session):
-    """Run flake8.
+    """Run linters.

-    Returns a failure if flake8 finds linting errors or sufficiently
+    Returns a failure if the linters find linting errors or sufficiently
     serious code quality issues.
     """
     session.interpreter = 'python3.6'
-    session.install('flake8', *LOCAL_DEPS)
+    session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS)
     session.install('.')
     session.run('flake8', 'google/cloud/bigtable')
+    session.run(
+        'gcp-devrel-py-tools', 'run-pylint',
+        '--config', 'pylint.config.py',
+        '--library-filesets', 'google',
+        '--test-filesets', 'tests',
+        # Temporarily allow this to fail.
+        success_codes=range(0, 100))

 @nox.session
 def lint_setup_py(session):
     """Verify that setup.py is valid (including RST check)."""
     session.interpreter = 'python3.6'
+
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'setup'
+
     session.install('docutils', 'Pygments')
     session.run(
         'python', 'setup.py', 'check', '--restructuredtext', '--strict')
diff --git a/bigtable/pylint.config.py b/bigtable/pylint.config.py
new file mode 100644
index 000000000000..b618319b8b61
--- /dev/null
+++ b/bigtable/pylint.config.py
@@ -0,0 +1,25 @@
+# Copyright 2017 Google Inc.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/bigtable/setup.py b/bigtable/setup.py index 212feda21758..8d5bad6a1ffd 100644 --- a/bigtable/setup.py +++ b/bigtable/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,13 +51,13 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'google-gax>=0.15.7, <0.16dev', ] setup( name='google-cloud-bigtable', - version='0.24.0', + version='0.25.0', description='Python Client for Google Cloud Bigtable', long_description=README, namespace_packages=[ diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 1fcda808db39..cfc2cb17f805 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -32,7 +32,6 @@ from google.cloud.environment_vars import BIGTABLE_EMULATOR from test_utils.retry import RetryErrors -from test_utils.retry import RetryResult from test_utils.system import EmulatorCreds from test_utils.system import unique_resource_id @@ -65,27 +64,6 @@ class Config(object): IN_EMULATOR = False -def _wait_until_complete(operation, max_attempts=5): - """Wait until an operation has completed. - - :type operation: :class:`google.cloud.operation.Operation` - :param operation: Operation that has not completed. - - :type max_attempts: int - :param max_attempts: (Optional) The maximum number of times to check if - the operation has completed. Defaults to 5. - - :rtype: bool - :returns: Boolean indicating if the operation is complete. - """ - - def _operation_complete(result): - return result - - retry = RetryResult(_operation_complete, max_tries=max_attempts) - return retry(operation.poll)() - - def _retry_on_unavailable(exc): """Retry only errors whose status code is 'UNAVAILABLE'.""" from grpc import StatusCode @@ -117,8 +95,7 @@ def setUpModule(): # After listing, create the test instance. created_op = Config.INSTANCE.create() - if not _wait_until_complete(created_op): - raise RuntimeError('Instance creation exceed 5 seconds.') + created_op.result(timeout=10) def tearDownModule(): @@ -166,7 +143,7 @@ def test_create_instance(self): self.instances_to_delete.append(instance) # We want to make sure the operation completes. - self.assertTrue(_wait_until_complete(operation)) + operation.result(timeout=10) # Create a new instance instance and make sure it is the same. 
instance_alt = Config.CLIENT.instance(ALT_INSTANCE_ID, LOCATION_ID) diff --git a/bigtable/tests/unit/test_client.py b/bigtable/tests/unit/test_client.py index 17656be60c00..9e0485a41554 100644 --- a/bigtable/tests/unit/test_client.py +++ b/bigtable/tests/unit/test_client.py @@ -256,171 +256,215 @@ def _get_target_class(): def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) - def _make_oneWithMocks(self, *args, **kwargs): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT - - mock_make_data_stub = _MakeStubMock() - mock_make_instance_stub = _MakeStubMock() - mock_make_operations_stub = _MakeStubMock() - mock_make_table_stub = _MakeStubMock() - with _Monkey(MUT, _make_data_stub=mock_make_data_stub, - _make_instance_stub=mock_make_instance_stub, - _make_operations_stub=mock_make_operations_stub, - _make_table_stub=mock_make_table_stub): - return self._make_one(*args, **kwargs) - - def _constructor_test_helper(self, expected_scopes, creds, - read_only=False, admin=False, - user_agent=None, expected_creds=None): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT - - user_agent = user_agent or MUT.DEFAULT_USER_AGENT - - mock_make_data_stub = _MakeStubMock() - mock_make_instance_stub = _MakeStubMock() - mock_make_operations_stub = _MakeStubMock() - mock_make_table_stub = _MakeStubMock() - with _Monkey(MUT, _make_data_stub=mock_make_data_stub, - _make_instance_stub=mock_make_instance_stub, - _make_operations_stub=mock_make_operations_stub, - _make_table_stub=mock_make_table_stub): - client = self._make_one(project=self.PROJECT, credentials=creds, - read_only=read_only, admin=admin, - user_agent=user_agent) - - # Verify the mocks. - self.assertEqual(mock_make_data_stub.calls, [client]) - if admin: - self.assertSequenceEqual(mock_make_instance_stub.calls, [client]) - self.assertSequenceEqual(mock_make_operations_stub.calls, [client]) - self.assertSequenceEqual(mock_make_table_stub.calls, [client]) - else: - self.assertSequenceEqual(mock_make_instance_stub.calls, []) - self.assertSequenceEqual(mock_make_operations_stub.calls, []) - self.assertSequenceEqual(mock_make_table_stub.calls, []) - - expected_creds = expected_creds or creds.with_scopes.return_value - self.assertIs(client._credentials, expected_creds) + @mock.patch('google.cloud.bigtable.client._make_table_stub') + @mock.patch('google.cloud.bigtable.client._make_operations_stub') + @mock.patch('google.cloud.bigtable.client._make_instance_stub') + @mock.patch('google.cloud.bigtable.client._make_data_stub') + def _make_one_with_mocks( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub, + *args, **kwargs): + return self._make_one(*args, **kwargs) + + @mock.patch('google.cloud.bigtable.client._make_table_stub') + @mock.patch('google.cloud.bigtable.client._make_operations_stub') + @mock.patch('google.cloud.bigtable.client._make_instance_stub') + @mock.patch('google.cloud.bigtable.client._make_data_stub') + def test_constructor_default_scopes( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub): + from google.cloud.bigtable.client import DATA_SCOPE + + expected_scopes = (DATA_SCOPE,) + credentials = _make_credentials() + custom_user_agent = 'custom-application' + client = self._make_one( + project=self.PROJECT, credentials=credentials, + user_agent=custom_user_agent) - if expected_scopes is not None: - 
creds.with_scopes.assert_called_once_with(expected_scopes) + self.assertEqual(client.project, self.PROJECT) + self.assertIs( + client._credentials, credentials.with_scopes.return_value) + self.assertIsNone(client._http_internal) + self.assertFalse(client._read_only) + self.assertFalse(client._admin) + self.assertEqual(client.SCOPE, expected_scopes) + self.assertEqual(client.user_agent, custom_user_agent) + self.assertIsNone(client.emulator_host) + self.assertIs(client._data_stub, _make_data_stub.return_value) + self.assertIsNone(client._instance_stub_internal) + self.assertIsNone(client._operations_stub_internal) + self.assertIsNone(client._table_stub_internal) + + # Check mocks. + credentials.with_scopes.assert_called_once_with(expected_scopes) + _make_data_stub.assert_called_once_with(client) + _make_instance_stub.assert_not_called() + _make_operations_stub.assert_not_called() + _make_table_stub.assert_not_called() + + @mock.patch('google.cloud.bigtable.client._make_table_stub') + @mock.patch('google.cloud.bigtable.client._make_operations_stub') + @mock.patch('google.cloud.bigtable.client._make_instance_stub') + @mock.patch('google.cloud.bigtable.client._make_data_stub') + def test_constructor_with_admin( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub): + from google.cloud._http import DEFAULT_USER_AGENT + from google.cloud.bigtable.client import ADMIN_SCOPE + from google.cloud.bigtable.client import DATA_SCOPE + + expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, admin=True) self.assertEqual(client.project, self.PROJECT) - self.assertEqual(client.user_agent, user_agent) - # Check gRPC stubs (or mocks of them) are set - self.assertIs(client._data_stub, mock_make_data_stub.result) - if admin: - self.assertIs(client._instance_stub_internal, - mock_make_instance_stub.result) - self.assertIs(client._operations_stub_internal, - mock_make_operations_stub.result) - self.assertIs(client._table_stub_internal, - mock_make_table_stub.result) + self.assertIs( + client._credentials, credentials.with_scopes.return_value) + self.assertIsNone(client._http_internal) + self.assertFalse(client._read_only) + self.assertTrue(client._admin) + self.assertEqual(client.SCOPE, expected_scopes) + self.assertEqual(client.user_agent, DEFAULT_USER_AGENT) + self.assertIsNone(client.emulator_host) + self.assertIs(client._data_stub, _make_data_stub.return_value) + self.assertIs( + client._instance_stub_internal, _make_instance_stub.return_value) + self.assertIs( + client._operations_stub_internal, + _make_operations_stub.return_value) + self.assertIs( + client._table_stub_internal, _make_table_stub.return_value) + + # Check mocks. 
+ credentials.with_scopes.assert_called_once_with(expected_scopes) + _make_data_stub.assert_called_once_with(client) + _make_instance_stub.assert_called_once_with(client) + _make_operations_stub.assert_called_once_with(client) + _make_table_stub.assert_called_once_with(client) + + def test_constructor_both_admin_and_read_only(self): + credentials = _make_credentials() + with self.assertRaises(ValueError): + self._make_one( + project=self.PROJECT, credentials=credentials, + admin=True, read_only=True) + + def test__get_scopes_default(self): + from google.cloud.bigtable.client import DATA_SCOPE + + client = self._make_one( + project=self.PROJECT, credentials=_make_credentials()) + self.assertEqual(client._get_scopes(), (DATA_SCOPE,)) + + def test__get_scopes_admin(self): + from google.cloud.bigtable.client import ADMIN_SCOPE + from google.cloud.bigtable.client import DATA_SCOPE + + client = self._make_one( + project=self.PROJECT, credentials=_make_credentials(), + admin=True) + expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) + self.assertEqual(client._get_scopes(), expected_scopes) + + def test__get_scopes_read_only(self): + from google.cloud.bigtable.client import READ_ONLY_SCOPE + + client = self._make_one( + project=self.PROJECT, credentials=_make_credentials(), + read_only=True) + self.assertEqual(client._get_scopes(), (READ_ONLY_SCOPE,)) + + def _copy_helper_check_stubs(self, client, new_client): + if client._admin: + # Check the instance stub. + self.assertIs( + client._instance_stub_internal, mock.sentinel.inst_stub1) + self.assertIs( + new_client._instance_stub_internal, mock.sentinel.inst_stub2) + self.assertIsNot( + new_client._instance_stub_internal, + client._instance_stub_internal) + # Check the operations stub. + self.assertIs( + client._operations_stub_internal, mock.sentinel.ops_stub1) + self.assertIs( + new_client._operations_stub_internal, mock.sentinel.ops_stub2) + self.assertIsNot( + new_client._operations_stub_internal, + client._operations_stub_internal) + # Check the table stub. + self.assertIs( + client._table_stub_internal, mock.sentinel.table_stub1) + self.assertIs( + new_client._table_stub_internal, mock.sentinel.table_stub2) + self.assertIsNot( + new_client._table_stub_internal, client._table_stub_internal) else: + # Check the instance stub. self.assertIsNone(client._instance_stub_internal) + self.assertIsNone(new_client._instance_stub_internal) + # Check the operations stub. self.assertIsNone(client._operations_stub_internal) + self.assertIsNone(new_client._operations_stub_internal) + # Check the table stub. self.assertIsNone(client._table_stub_internal) + self.assertIsNone(new_client._table_stub_internal) + + @mock.patch( + 'google.cloud.bigtable.client._make_table_stub', + side_effect=[mock.sentinel.table_stub1, mock.sentinel.table_stub2], + ) + @mock.patch( + 'google.cloud.bigtable.client._make_operations_stub', + side_effect=[mock.sentinel.ops_stub1, mock.sentinel.ops_stub2], + ) + @mock.patch( + 'google.cloud.bigtable.client._make_instance_stub', + side_effect=[mock.sentinel.inst_stub1, mock.sentinel.inst_stub2], + ) + @mock.patch( + 'google.cloud.bigtable.client._make_data_stub', + side_effect=[mock.sentinel.data_stub1, mock.sentinel.data_stub2], + ) + def _copy_test_helper( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub, **kwargs): + credentials = _make_credentials() + # Make sure it "already" is scoped. 
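+        # (``google.auth.credentials.with_scopes_if_required`` consults
+        # this flag and returns already-scoped credentials unchanged.)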
+ credentials.requires_scopes = False - def test_constructor_default_scopes(self): - from google.cloud.bigtable import client as MUT - - expected_scopes = [MUT.DATA_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds) - - def test_constructor_custom_user_agent(self): - from google.cloud.bigtable import client as MUT - - CUSTOM_USER_AGENT = 'custom-application' - expected_scopes = [MUT.DATA_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds, - user_agent=CUSTOM_USER_AGENT) - - def test_constructor_with_admin(self): - from google.cloud.bigtable import client as MUT - - expected_scopes = [MUT.DATA_SCOPE, MUT.ADMIN_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds, admin=True) - - def test_constructor_with_read_only(self): - from google.cloud.bigtable import client as MUT - - expected_scopes = [MUT.READ_ONLY_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds, read_only=True) - - def test_constructor_both_admin_and_read_only(self): - creds = _make_credentials() - with self.assertRaises(ValueError): - self._constructor_test_helper([], creds, admin=True, - read_only=True) - - def test_constructor_implicit_credentials(self): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT - - creds = _make_credentials() - expected_scopes = [MUT.DATA_SCOPE] - - def mock_get_credentials(): - return creds - - with _Monkey(MUT, get_credentials=mock_get_credentials): - self._constructor_test_helper( - None, None, - expected_creds=creds.with_scopes.return_value) - - creds.with_scopes.assert_called_once_with(expected_scopes) - - def test_constructor_credentials_wo_create_scoped(self): - creds = _make_credentials() - expected_scopes = None - self._constructor_test_helper(expected_scopes, creds) - - def _copy_test_helper(self, read_only=False, admin=False): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT + client = self._make_one( + project=self.PROJECT, credentials=credentials, **kwargs) + self.assertIs(client._credentials, credentials) - credentials = _make_credentials() - client = self._make_oneWithMocks( - project=self.PROJECT, - credentials=credentials, - read_only=read_only, - admin=admin, - user_agent=self.USER_AGENT) - # Put some fake stubs in place so that we can verify they don't - # get copied. In the admin=False case, only the data stub will - # not be None, so we over-ride all the internal values. - client._data_stub = object() - client._instance_stub_internal = object() - client._operations_stub_internal = object() - client._table_stub_internal = object() - - mock_make_data_stub = _MakeStubMock() - mock_make_instance_stub = _MakeStubMock() - mock_make_operations_stub = _MakeStubMock() - mock_make_table_stub = _MakeStubMock() - with _Monkey(MUT, _make_data_stub=mock_make_data_stub, - _make_instance_stub=mock_make_instance_stub, - _make_operations_stub=mock_make_operations_stub, - _make_table_stub=mock_make_table_stub): - new_client = client.copy() + new_client = client.copy() self.assertEqual(new_client._admin, client._admin) self.assertEqual(new_client._credentials, client._credentials) self.assertEqual(new_client.project, client.project) self.assertEqual(new_client.user_agent, client.user_agent) # Make sure stubs are not preserved. 
- self.assertNotEqual(new_client._data_stub, client._data_stub) - self.assertNotEqual(new_client._instance_stub_internal, - client._instance_stub_internal) - self.assertNotEqual(new_client._operations_stub_internal, - client._operations_stub_internal) - self.assertNotEqual(new_client._table_stub_internal, - client._table_stub_internal) + self.assertIs(client._data_stub, mock.sentinel.data_stub1) + self.assertIs(new_client._data_stub, mock.sentinel.data_stub2) + self.assertIsNot(new_client._data_stub, client._data_stub) + self._copy_helper_check_stubs(client, new_client) + + # Check mocks. + credentials.with_scopes.assert_not_called() + stub_calls = [ + mock.call(client), + mock.call(new_client), + ] + self.assertEqual(_make_data_stub.mock_calls, stub_calls) + if client._admin: + self.assertEqual(_make_instance_stub.mock_calls, stub_calls) + self.assertEqual(_make_operations_stub.mock_calls, stub_calls) + self.assertEqual(_make_table_stub.mock_calls, stub_calls) + else: + _make_instance_stub.assert_not_called() + _make_operations_stub.assert_not_called() + _make_table_stub.assert_not_called() def test_copy(self): self._copy_test_helper() @@ -434,61 +478,61 @@ def test_copy_read_only(self): def test_credentials_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials) + client = self._make_one_with_mocks( + project=project, credentials=credentials) self.assertIs(client.credentials, credentials.with_scopes.return_value) def test_project_name_property(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials) + client = self._make_one_with_mocks( + project=project, credentials=credentials) project_name = 'projects/' + project self.assertEqual(client.project_name, project_name) def test_instance_stub_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=True) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=True) self.assertIs(client._instance_stub, client._instance_stub_internal) def test_instance_stub_non_admin_failure(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=False) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=False) with self.assertRaises(ValueError): getattr(client, '_instance_stub') def test_operations_stub_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=True) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=True) self.assertIs(client._operations_stub, client._operations_stub_internal) def test_operations_stub_non_admin_failure(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=False) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=False) with self.assertRaises(ValueError): getattr(client, '_operations_stub') def test_table_stub_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=True) + client = self._make_one_with_mocks( + project=project, credentials=credentials, 
admin=True) self.assertIs(client._table_stub, client._table_stub_internal) def test_table_stub_non_admin_failure(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=False) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=False) with self.assertRaises(ValueError): getattr(client, '_table_stub') @@ -502,8 +546,8 @@ def test_instance_factory_defaults(self): INSTANCE_ID = 'instance-id' DISPLAY_NAME = 'display-name' credentials = _make_credentials() - client = self._make_oneWithMocks(project=PROJECT, - credentials=credentials) + client = self._make_one_with_mocks( + project=PROJECT, credentials=credentials) instance = client.instance(INSTANCE_ID, display_name=DISPLAY_NAME) @@ -524,8 +568,8 @@ def test_instance_factory_w_explicit_serve_nodes(self): LOCATION_ID = 'locname' SERVE_NODES = 5 credentials = _make_credentials() - client = self._make_oneWithMocks(project=PROJECT, - credentials=credentials) + client = self._make_one_with_mocks( + project=PROJECT, credentials=credentials) instance = client.instance( INSTANCE_ID, display_name=DISPLAY_NAME, @@ -555,7 +599,7 @@ def test_list_instances(self): 'projects/' + self.PROJECT + '/instances/' + INSTANCE_ID2) credentials = _make_credentials() - client = self._make_oneWithMocks( + client = self._make_one_with_mocks( project=self.PROJECT, credentials=credentials, admin=True, @@ -610,14 +654,3 @@ def __init__(self, credentials, user_agent, emulator_host=None): self.credentials = credentials self.user_agent = user_agent self.emulator_host = emulator_host - - -class _MakeStubMock(object): - - def __init__(self): - self.result = object() - self.calls = [] - - def __call__(self, client): - self.calls.append(client) - return self.result diff --git a/bigtable/tests/unit/test_cluster.py b/bigtable/tests/unit/test_cluster.py index 3cc40964ba49..e244b55d6dff 100644 --- a/bigtable/tests/unit/test_cluster.py +++ b/bigtable/tests/unit/test_cluster.py @@ -15,6 +15,8 @@ import unittest +import mock + class TestCluster(unittest.TestCase): @@ -232,7 +234,7 @@ def test_reload(self): def test_create(self): from google.longrunning import operations_pb2 - from google.cloud.operation import Operation + from google.cloud.future import operation from google.cloud.bigtable._generated import ( bigtable_instance_admin_pb2 as messages_v2_pb2) from tests.unit._testing import _FakeStub @@ -256,13 +258,9 @@ def test_create(self): # Perform the method and check the result. 
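+        # With the move to google.cloud.future, create() returns an
+        # Operation future wrapping the raw protobuf, so the name is
+        # reached below via ``result.operation.name`` rather than
+        # ``result.name``.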
result = cluster.create() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, OP_NAME) - self.assertIs(result.target, cluster) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, OP_NAME) self.assertIsNone(result.metadata) - self.assertEqual(result.caller_metadata, - {'request_type': 'CreateCluster'}) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -278,7 +276,7 @@ def test_create(self): def test_update(self): import datetime from google.longrunning import operations_pb2 - from google.cloud.operation import Operation + from google.cloud.future import operation from google.protobuf.any_pb2 import Any from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud.bigtable._generated import ( @@ -324,15 +322,11 @@ def test_update(self): result = cluster.update() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, OP_NAME) - self.assertIs(result.target, cluster) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, OP_NAME) self.assertIsInstance(result.metadata, messages_v2_pb2.UpdateClusterMetadata) self.assertEqual(result.metadata.request_time, NOW_PB) - self.assertEqual(result.caller_metadata, - {'request_type': 'UpdateCluster'}) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -393,12 +387,14 @@ def test_it(self): instance = _Instance(INSTANCE_ID, client) cluster = Cluster(CLUSTER_ID, instance, serve_nodes=SERVE_NODES) + cluster.location = u'projects/prahj-ekt/locations/zona-tres' request_pb = self._call_fut(cluster) self.assertEqual(request_pb.cluster_id, CLUSTER_ID) self.assertEqual(request_pb.parent, instance.name) self.assertEqual(request_pb.cluster.serve_nodes, SERVE_NODES) + self.assertEqual(request_pb.cluster.location, cluster.location) def _ClusterPB(*args, **kw): @@ -446,6 +442,7 @@ class _Client(object): def __init__(self, project): self.project = project self.project_name = 'projects/' + self.project + self._operations_stub = mock.sentinel.operations_stub def __eq__(self, other): return (other.project == self.project and diff --git a/bigtable/tests/unit/test_instance.py b/bigtable/tests/unit/test_instance.py index cdad3c376d0a..03c0034fc49e 100644 --- a/bigtable/tests/unit/test_instance.py +++ b/bigtable/tests/unit/test_instance.py @@ -15,6 +15,8 @@ import unittest +import mock + class TestInstance(unittest.TestCase): @@ -236,7 +238,7 @@ def test_create(self): bigtable_instance_admin_pb2 as messages_v2_pb2) from google.cloud._helpers import _datetime_to_pb_timestamp from tests.unit._testing import _FakeStub - from google.cloud.operation import Operation + from google.cloud.future import operation from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES NOW = datetime.datetime.utcnow() @@ -263,15 +265,11 @@ def test_create(self): # Perform the method and check the result. 
result = instance.create() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, self.OP_NAME) - self.assertIs(result.target, instance) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, self.OP_NAME) self.assertIsInstance(result.metadata, messages_v2_pb2.CreateInstanceMetadata) self.assertEqual(result.metadata.request_time, NOW_PB) - self.assertEqual(result.caller_metadata, - {'request_type': 'CreateInstance'}) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -291,7 +289,7 @@ def test_create_w_explicit_serve_nodes(self): from google.cloud.bigtable._generated import ( bigtable_instance_admin_pb2 as messages_v2_pb2) from tests.unit._testing import _FakeStub - from google.cloud.operation import Operation + from google.cloud.future import operation SERVE_NODES = 5 @@ -308,10 +306,8 @@ def test_create_w_explicit_serve_nodes(self): # Perform the method and check the result. result = instance.create() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, self.OP_NAME) - self.assertIs(result.target, instance) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, self.OP_NAME) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -582,6 +578,7 @@ class _Client(object): def __init__(self, project): self.project = project self.project_name = 'projects/' + self.project + self._operations_stub = mock.sentinel.operations_stub def copy(self): from copy import deepcopy diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 5867e76aff73..dc4d2b5bbad0 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -255,7 +255,7 @@ def _create_test_helper(self, initial_split_keys, column_families=()): for cf in column_families: cf_pb = table_pb.column_families[cf.column_family_id] if cf.gc_rule is not None: - cf_pb.gc_rule.MergeFrom(cf.gc_rule.to_pb()) + cf_pb.gc_rule.CopyFrom(cf.gc_rule.to_pb()) request_pb = _CreateTableRequestPB( initial_splits=splits_pb, parent=self.INSTANCE_NAME, diff --git a/core/.coveragerc b/core/.coveragerc index 9d89b1db5666..ce75f605a508 100644 --- a/core/.coveragerc +++ b/core/.coveragerc @@ -13,3 +13,6 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError + raise NotImplementedError() diff --git a/core/.flake8 b/core/.flake8 index 25168dc87605..3db9b737d6bc 100644 --- a/core/.flake8 +++ b/core/.flake8 @@ -1,4 +1,8 @@ [flake8] +import-order-style=google +# Note: this forces all google imports to be in the third group. See +# https://github.com/PyCQA/flake8-import-order/issues/111 +application-import-names=google exclude = __pycache__, .git, diff --git a/core/MANIFEST.in b/core/MANIFEST.in index 24aa72fb370b..1fbc0d0b321e 100644 --- a/core/MANIFEST.in +++ b/core/MANIFEST.in @@ -1,3 +1,3 @@ include README.rst LICENSE -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/core/README.rst b/core/README.rst index 5088505addc7..53cbd311a50e 100644 --- a/core/README.rst +++ b/core/README.rst @@ -9,7 +9,7 @@ used by all of the ``google-cloud-*``. - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/google-cloud-api.html +.. 
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/core/modules.html Quick Start ----------- @@ -19,6 +19,6 @@ Quick Start $ pip install --upgrade google-cloud-core .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-core.svg - :target: https://pypi.python.org/pypi/google-cloud-core + :target: https://pypi.org/project/google-cloud-core/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-core.svg - :target: https://pypi.python.org/pypi/google-cloud-core + :target: https://pypi.org/project/google-cloud-core/ diff --git a/core/google/__init__.py b/core/google/__init__.py index b2b833373882..a35569c36339 100644 --- a/core/google/__init__.py +++ b/core/google/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/__init__.py b/core/google/cloud/__init__.py index b2b833373882..59a804265f5c 100644 --- a/core/google/cloud/__init__.py +++ b/core/google/cloud/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google Cloud namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/_helpers.py b/core/google/cloud/_helpers.py index 2c2f08dcfb45..fdb22ecdf09c 100644 --- a/core/google/cloud/_helpers.py +++ b/core/google/cloud/_helpers.py @@ -17,7 +17,6 @@ This module is not part of the public API surface. """ -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -26,10 +25,14 @@ import re from threading import local as Local +import google_auth_httplib2 +import httplib2 +import six +from six.moves import http_client + import google.auth from google.protobuf import duration_pb2 from google.protobuf import timestamp_pb2 -import google_auth_httplib2 try: import grpc @@ -37,10 +40,6 @@ except ImportError: # pragma: NO COVER grpc = None -import httplib2 -import six -from six.moves import http_client - _NOW = datetime.datetime.utcnow # To be replaced by tests. _RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' @@ -104,7 +103,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. """ - if len(self._stack) > 0: + if self._stack: return self._stack[-1] @@ -379,6 +378,29 @@ def _bytes_to_unicode(value): raise ValueError('%r could not be converted to unicode' % (value,)) +def _from_any_pb(pb_type, any_pb): + """Converts an Any protobuf to the specified message type + + Args: + pb_type (type): the type of the message that any_pb stores an instance + of. + any_pb (google.protobuf.any_pb2.Any): the object to be converted. + + Returns: + pb_type: An instance of the pb_type message. + + Raises: + TypeError: if the message could not be converted. + """ + msg = pb_type() + if not any_pb.Unpack(msg): + raise TypeError( + 'Could not convert {} to {}'.format( + any_pb.__class__.__name__, pb_type.__name__)) + + return msg + + def _pb_timestamp_to_datetime(timestamp_pb): """Convert a Timestamp protobuf to a datetime object. 
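(A minimal sketch of how the new ``_from_any_pb`` helper above behaves --
illustrative only, not part of the patch; the ``Timestamp`` payload and its
value are arbitrary.)

    from google.protobuf import any_pb2
    from google.protobuf import duration_pb2
    from google.protobuf import timestamp_pb2

    from google.cloud._helpers import _from_any_pb

    # Pack a concrete message into an Any, the way long-running operation
    # responses and metadata are delivered.
    stamp = timestamp_pb2.Timestamp(seconds=1234567890)
    any_pb = any_pb2.Any()
    any_pb.Pack(stamp)

    # Unpacking into the matching type recovers the original message.
    assert _from_any_pb(timestamp_pb2.Timestamp, any_pb) == stamp

    # Unpacking into the wrong type raises TypeError.
    try:
        _from_any_pb(duration_pb2.Duration, any_pb)
    except TypeError:
        pass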
diff --git a/core/google/cloud/_http.py b/core/google/cloud/_http.py index e1a481e581a7..b7c17ca91d6d 100644 --- a/core/google/cloud/_http.py +++ b/core/google/cloud/_http.py @@ -16,8 +16,8 @@ import json import platform -from pkg_resources import get_distribution +from pkg_resources import get_distribution import six from six.moves.urllib.parse import urlencode @@ -135,7 +135,7 @@ def build_api_url(cls, path, query_params=None, query_params = query_params or {} if query_params: - url += '?' + urlencode(query_params) + url += '?' + urlencode(query_params, doseq=True) return url @@ -279,7 +279,9 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises: Exception if the response code is not 200 OK. + :raises ~google.cloud.exceptions.GoogleCloudError: if the response code + is not 200 OK. + :raises TypeError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/core/google/cloud/_testing.py b/core/google/cloud/_testing.py index a544fffc5fe4..871b5f631bc7 100644 --- a/core/google/cloud/_testing.py +++ b/core/google/cloud/_testing.py @@ -14,17 +14,15 @@ """Shared testing utilities.""" - -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import class _Monkey(object): - # context-manager for replacing module names in the scope of a test. + """Context-manager for replacing module names in the scope of a test.""" def __init__(self, module, **kw): self.module = module - if len(kw) == 0: # pragma: NO COVER + if not kw: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -68,8 +66,12 @@ def _tempdir_mgr(): return _tempdir_mgr +# pylint: disable=invalid-name +# Retain _tempdir as a constant for backwards compatibility despite +# being an invalid name. _tempdir = _tempdir_maker() del _tempdir_maker +# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -79,7 +81,8 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - def _make_grpc_error(self, status_code, trailing=None): + @staticmethod + def _make_grpc_error(status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -111,6 +114,7 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): + """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index e7e43faf1e45..468cf9e40a52 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -18,19 +18,19 @@ import json from pickle import PicklingError -import google.auth.credentials -from google.oauth2 import service_account import google_auth_httplib2 import six +import google.auth +import google.auth.credentials from google.cloud._helpers import _determine_default_project -from google.cloud.credentials import get_credentials +from google.oauth2 import service_account _GOOGLE_AUTH_CREDENTIALS_HELP = ( 'This library only supports credentials from google-auth-library-python. ' - 'See https://google-cloud-python.readthedocs.io/en/latest/' - 'google-cloud-auth.html for help on authentication with this library.' 
+ 'See https://google-cloud-python.readthedocs.io/en/latest/core/auth.html ' + 'for help on authentication with this library.' ) @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises: :class:`TypeError` if there is a conflict with the kwargs + :raises TypeError: if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: @@ -135,7 +135,7 @@ def __init__(self, credentials=None, _http=None): credentials, google.auth.credentials.Credentials)): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and _http is None: - credentials = get_credentials() + credentials, _ = google.auth.default() self._credentials = google.auth.credentials.with_scopes_if_required( credentials, self.SCOPE) self._http_internal = _http diff --git a/core/google/cloud/exceptions.py b/core/google/cloud/exceptions.py index 32080de7ff50..e911980c6328 100644 --- a/core/google/cloud/exceptions.py +++ b/core/google/cloud/exceptions.py @@ -22,17 +22,18 @@ import copy import json + import six from google.cloud._helpers import _to_bytes -_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module - try: from grpc._channel import _Rendezvous except ImportError: # pragma: NO COVER _Rendezvous = None +_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module + # pylint: disable=invalid-name GrpcRendezvous = _Rendezvous diff --git a/core/google/cloud/streaming/__init__.py b/core/google/cloud/future/__init__.py similarity index 77% rename from core/google/cloud/streaming/__init__.py rename to core/google/cloud/future/__init__.py index 44e00907cb66..e5cf2b20ce7e 100644 --- a/core/google/cloud/streaming/__init__.py +++ b/core/google/cloud/future/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2017, Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Vendored-in from google-apitools 0.4.11 +"""Futures for dealing with asynchronous operations.""" -"""Base ``google.cloud.streaming`` package.""" +from google.cloud.future.base import Future + +__all__ = [ + 'Future', +] diff --git a/core/google/cloud/future/_helpers.py b/core/google/cloud/future/_helpers.py new file mode 100644 index 000000000000..933d0b8b2d44 --- /dev/null +++ b/core/google/cloud/future/_helpers.py @@ -0,0 +1,39 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Private helpers for futures.""" + +import logging +import threading + + +_LOGGER = logging.getLogger(__name__) + + +def start_daemon_thread(*args, **kwargs): + """Starts a thread and marks it as a daemon thread.""" + thread = threading.Thread(*args, **kwargs) + thread.daemon = True + thread.start() + return thread + + +def safe_invoke_callback(callback, *args, **kwargs): + """Invoke a callback, swallowing and logging any exceptions.""" + # pylint: disable=bare-except + # We intentionally want to swallow all exceptions. + try: + return callback(*args, **kwargs) + except: + _LOGGER.exception('Error while executing Future callback.') diff --git a/core/google/cloud/future/base.py b/core/google/cloud/future/base.py new file mode 100644 index 000000000000..243913640d62 --- /dev/null +++ b/core/google/cloud/future/base.py @@ -0,0 +1,67 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstract and helper bases for Future implementations.""" + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class Future(object): + # pylint: disable=missing-docstring + # We inherit the interfaces here from concurrent.futures. + + """Future interface. + + This interface is based on :class:`concurrent.futures.Future`. + """ + + @abc.abstractmethod + def cancel(self): + raise NotImplementedError() + + @abc.abstractmethod + def cancelled(self): + raise NotImplementedError() + + @abc.abstractmethod + def running(self): + raise NotImplementedError() + + @abc.abstractmethod + def done(self): + raise NotImplementedError() + + @abc.abstractmethod + def result(self, timeout=None): + raise NotImplementedError() + + @abc.abstractmethod + def exception(self, timeout=None): + raise NotImplementedError() + + @abc.abstractmethod + def add_done_callback(self, fn): + # pylint: disable=invalid-name + raise NotImplementedError() + + @abc.abstractmethod + def set_result(self, result): + raise NotImplementedError() + + @abc.abstractmethod + def set_exception(self, exception): + raise NotImplementedError() diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py new file mode 100644 index 000000000000..ec430cd9c55b --- /dev/null +++ b/core/google/cloud/future/operation.py @@ -0,0 +1,246 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+"""Futures for long-running operations returned from Google Cloud APIs."""
+
+import functools
+import threading
+
+from google.cloud import _helpers
+from google.cloud import exceptions
+from google.cloud.future import polling
+from google.longrunning import operations_pb2
+from google.protobuf import json_format
+from google.rpc import code_pb2
+
+
+class Operation(polling.PollingFuture):
+    """A Future for interacting with a Google API Long-Running Operation.
+
+    Args:
+        operation (google.longrunning.operations_pb2.Operation): The
+            initial operation.
+        refresh (Callable[[], Operation]): A callable that returns the
+            latest state of the operation.
+        cancel (Callable[[], None]): A callable that tries to cancel
+            the operation.
+        result_type (type): The protobuf type for the operation's result.
+        metadata_type (type): The protobuf type for the operation's
+            metadata.
+    """
+
+    def __init__(
+            self, operation, refresh, cancel,
+            result_type, metadata_type=None):
+        super(Operation, self).__init__()
+        self._operation = operation
+        self._refresh = refresh
+        self._cancel = cancel
+        self._result_type = result_type
+        self._metadata_type = metadata_type
+        self._completion_lock = threading.Lock()
+        # Invoke this in case the operation came back already complete.
+        self._set_result_from_operation()
+
+    @property
+    def operation(self):
+        """google.longrunning.operations_pb2.Operation: The current
+        long-running operation."""
+        return self._operation
+
+    @property
+    def metadata(self):
+        """google.protobuf.Message: the current operation metadata."""
+        if not self._operation.HasField('metadata'):
+            return None
+
+        return _helpers._from_any_pb(
+            self._metadata_type, self._operation.metadata)
+
+    def _set_result_from_operation(self):
+        """Set the result or exception from the operation if it is complete."""
+        # This must be done in a lock to prevent the polling thread
+        # and main thread from both executing the completion logic
+        # at the same time.
+        with self._completion_lock:
+            # If the operation isn't complete or if the result has already been
+            # set, do not call set_result/set_exception again.
+            # Note: self._result_set is set to True in set_result and
+            # set_exception, in case those methods are invoked directly.
+            if not self._operation.done or self._result_set:
+                return
+
+            if self._operation.HasField('response'):
+                response = _helpers._from_any_pb(
+                    self._result_type, self._operation.response)
+                self.set_result(response)
+            elif self._operation.HasField('error'):
+                exception = exceptions.GoogleCloudError(
+                    self._operation.error.message,
+                    errors=(self._operation.error,))
+                self.set_exception(exception)
+            else:
+                exception = exceptions.GoogleCloudError(
+                    'Unexpected state: Long-running operation had neither '
+                    'response nor error set.')
+                self.set_exception(exception)
+
+    def _refresh_and_update(self):
+        """Refresh the operation and update the result if needed."""
+        # If the currently cached operation is done, no need to make another
+        # RPC as it will not change once done.
+        if not self._operation.done:
+            self._operation = self._refresh()
+            self._set_result_from_operation()
+
+    def done(self):
+        """Checks to see if the operation is complete.
+
+        Returns:
+            bool: True if the operation is complete, False otherwise.
+        """
+        self._refresh_and_update()
+        return self._operation.done
+
+    def cancel(self):
+        """Attempt to cancel the operation.
+
+        Returns:
+            bool: True if the cancel RPC was made, False if the operation is
+                already complete.
+        """
+        if self.done():
+            return False
+
+        self._cancel()
+        return True
+
+    def cancelled(self):
+        """True if the operation was cancelled."""
+        self._refresh_and_update()
+        return (self._operation.HasField('error') and
+                self._operation.error.code == code_pb2.CANCELLED)
+
+
+def _refresh_http(api_request, operation_name):
+    """Refresh an operation using a JSON/HTTP client.
+
+    Args:
+        api_request (Callable): A callable used to make an API request. This
+            should generally be
+            :meth:`google.cloud._http.Connection.api_request`.
+        operation_name (str): The name of the operation.
+
+    Returns:
+        google.longrunning.operations_pb2.Operation: The operation.
+    """
+    path = 'operations/{}'.format(operation_name)
+    api_response = api_request(method='GET', path=path)
+    return json_format.ParseDict(
+        api_response, operations_pb2.Operation())
+
+
+def _cancel_http(api_request, operation_name):
+    """Cancel an operation using a JSON/HTTP client.
+
+    Args:
+        api_request (Callable): A callable used to make an API request. This
+            should generally be
+            :meth:`google.cloud._http.Connection.api_request`.
+        operation_name (str): The name of the operation.
+    """
+    path = 'operations/{}:cancel'.format(operation_name)
+    api_request(method='POST', path=path)
+
+
+def from_http_json(operation, api_request, result_type, **kwargs):
+    """Create an operation future using an HTTP/JSON client.
+
+    This interacts with the long-running operations `service`_ (specific
+    to a given API) via `HTTP/JSON`_.
+
+    .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\
+            v1beta1/operations#Operation
+
+    Args:
+        operation (dict): Operation as a dictionary.
+        api_request (Callable): A callable used to make an API request. This
+            should generally be
+            :meth:`google.cloud._http.Connection.api_request`.
+        result_type (type): The protobuf result type.
+        kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+    Returns:
+        Operation: The operation future to track the given operation.
+    """
+    operation_proto = json_format.ParseDict(
+        operation, operations_pb2.Operation())
+    refresh = functools.partial(
+        _refresh_http, api_request, operation_proto.name)
+    cancel = functools.partial(
+        _cancel_http, api_request, operation_proto.name)
+    return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
+
+
+def _refresh_grpc(operations_stub, operation_name):
+    """Refresh an operation using a gRPC client.
+
+    Args:
+        operations_stub (google.longrunning.operations_pb2.OperationsStub):
+            The gRPC operations stub.
+        operation_name (str): The name of the operation.
+
+    Returns:
+        google.longrunning.operations_pb2.Operation: The operation.
+    """
+    request_pb = operations_pb2.GetOperationRequest(name=operation_name)
+    return operations_stub.GetOperation(request_pb)
+
+
+def _cancel_grpc(operations_stub, operation_name):
+    """Cancel an operation using a gRPC client.
+
+    Args:
+        operations_stub (google.longrunning.operations_pb2.OperationsStub):
+            The gRPC operations stub.
+        operation_name (str): The name of the operation.
+    """
+    request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
+    operations_stub.CancelOperation(request_pb)
+
+
+def from_grpc(operation, operations_stub, result_type, **kwargs):
+    """Create an operation future using a gRPC client.
+
+    This interacts with the long-running operations `service`_ (specific
+    to a given API) via gRPC.
+
+    .. 
_service: https://github.com/googleapis/googleapis/blob/\ + 050400df0fdb16f63b63e9dee53819044bffc857/\ + google/longrunning/operations.proto#L38 + + Args: + operation (google.longrunning.operations_pb2.Operation): The operation. + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The operations stub. + result_type (type): The protobuf result type. + kwargs: Keyword args passed into the :class:`Operation` constructor. + + Returns: + Operation: The operation future to track the given operation. + """ + refresh = functools.partial( + _refresh_grpc, operations_stub, operation.name) + cancel = functools.partial( + _cancel_grpc, operations_stub, operation.name) + return Operation(operation, refresh, cancel, result_type, **kwargs) diff --git a/core/google/cloud/future/polling.py b/core/google/cloud/future/polling.py new file mode 100644 index 000000000000..6b7ae4221f64 --- /dev/null +++ b/core/google/cloud/future/polling.py @@ -0,0 +1,169 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstract and helper bases for Future implementations.""" + +import abc +import concurrent.futures +import functools +import operator + +import six +import tenacity + +from google.cloud.future import _helpers +from google.cloud.future import base + + +class PollingFuture(base.Future): + """A Future that needs to poll some service to check its status. + + The :meth:`done` method should be implemented by subclasses. The polling + behavior will repeatedly call ``done`` until it returns True. + + .. note: Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods. + """ + def __init__(self): + super(PollingFuture, self).__init__() + self._result = None + self._exception = None + self._result_set = False + """bool: Set to True when the result has been set via set_result or + set_exception.""" + self._polling_thread = None + self._done_callbacks = [] + + @abc.abstractmethod + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. + """ + # pylint: disable=redundant-returns-doc, missing-raises-doc + raise NotImplementedError() + + def running(self): + """True if the operation is currently running.""" + return not self.done() + + def _blocking_poll(self, timeout=None): + """Poll and wait for the Future to be resolved. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + """ + if self._result_set: + return + + retry_on = tenacity.retry_if_result( + functools.partial(operator.is_not, True)) + # Use exponential backoff with jitter. 
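+        # (The two tenacity wait strategies compose with ``+``: each poll
+        # sleeps for the capped exponential term plus up to one second of
+        # random jitter, so concurrent pollers do not fire in lock-step.)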
+        wait_on = (
+            tenacity.wait_exponential(multiplier=1, max=10) +
+            tenacity.wait_random(0, 1))
+
+        if timeout is None:
+            retry = tenacity.retry(retry=retry_on, wait=wait_on)
+        else:
+            retry = tenacity.retry(
+                retry=retry_on,
+                wait=wait_on,
+                stop=tenacity.stop_after_delay(timeout))
+
+        try:
+            retry(self.done)()
+        except tenacity.RetryError as exc:
+            six.raise_from(
+                concurrent.futures.TimeoutError(
+                    'Operation did not complete within the designated '
+                    'timeout.'),
+                exc)
+
+    def result(self, timeout=None):
+        """Get the result of the operation, blocking if necessary.
+
+        Args:
+            timeout (int): How long to wait for the operation to complete.
+                If None, wait indefinitely.
+
+        Returns:
+            google.protobuf.Message: The Operation's result.
+
+        Raises:
+            Exception: The exception recorded for the operation, if it
+                failed, or :class:`concurrent.futures.TimeoutError` if the
+                timeout is reached before the operation completes.
+        """
+        self._blocking_poll(timeout=timeout)
+
+        if self._exception is not None:
+            # pylint: disable=raising-bad-type
+            # Pylint doesn't recognize that this is valid in this case.
+            raise self._exception
+
+        return self._result
+
+    def exception(self, timeout=None):
+        """Get the exception from the operation, blocking if necessary.
+
+        Args:
+            timeout (int): How long to wait for the operation to complete.
+                If None, wait indefinitely.
+
+        Returns:
+            Optional[Exception]: The operation's error, if it failed.
+        """
+        self._blocking_poll(timeout=timeout)
+        return self._exception
+
+    def add_done_callback(self, fn):
+        """Add a callback to be executed when the operation is complete.
+
+        If the operation is not already complete, this will start a helper
+        thread to poll for the status of the operation in the background.
+
+        Args:
+            fn (Callable[Future]): The callback to execute when the operation
+                is complete.
+        """
+        if self._result_set:
+            _helpers.safe_invoke_callback(fn, self)
+            return
+
+        self._done_callbacks.append(fn)
+
+        if self._polling_thread is None:
+            # The polling thread will exit on its own as soon as the operation
+            # is done.
+            self._polling_thread = _helpers.start_daemon_thread(
+                target=self._blocking_poll)
+
+    def _invoke_callbacks(self, *args, **kwargs):
+        """Invoke all done callbacks."""
+        for callback in self._done_callbacks:
+            _helpers.safe_invoke_callback(callback, *args, **kwargs)
+
+    def set_result(self, result):
+        """Set the Future's result."""
+        self._result = result
+        self._result_set = True
+        self._invoke_callbacks(self)
+
+    def set_exception(self, exception):
+        """Set the Future's exception."""
+        self._exception = exception
+        self._result_set = True
+        self._invoke_callbacks(self)
diff --git a/core/google/cloud/iam.py b/core/google/cloud/iam.py
index 49bb11266cee..bbc31c047a85 100644
--- a/core/google/cloud/iam.py
+++ b/core/google/cloud/iam.py
@@ -226,14 +226,14 @@ def to_api_repr(self):
         if self.version is not None:
             resource['version'] = self.version
 
-        if len(self._bindings) > 0:
+        if self._bindings:
             bindings = resource['bindings'] = []
             for role, members in sorted(self._bindings.items()):
-                if len(members) > 0:
+                if members:
                     bindings.append(
                         {'role': role, 'members': sorted(set(members))})
 
-            if len(bindings) == 0:
+            if not bindings:
                 del resource['bindings']
 
         return resource
diff --git a/core/google/cloud/iterator.py b/core/google/cloud/iterator.py
index 7bb708e90f09..742443ddc5f9 100644
--- a/core/google/cloud/iterator.py
+++ b/core/google/cloud/iterator.py
@@ -242,7 +242,8 @@ def _page_iter(self, increment):
             results per page while an items iterator will want to increment
             per item.
 
-        Yields :class:`Page` instances. 
+ :rtype: :class:`Page` + :returns: pages """ page = self._next_page() while page is not None: @@ -387,6 +388,8 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. + + :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. """ params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/core/google/cloud/operation.py b/core/google/cloud/operation.py index 4e700a553e4f..9f53c595f658 100644 --- a/core/google/cloud/operation.py +++ b/core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. - :raises: ValueError if a registration already exists for the URL. + :raises ValueError: if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises: :class:`~exceptions.ValueError` if the operation + :raises ValueError: if the operation has already completed. """ if self.complete: diff --git a/core/google/cloud/streaming/buffered_stream.py b/core/google/cloud/streaming/buffered_stream.py deleted file mode 100644 index 24a52176cb66..000000000000 --- a/core/google/cloud/streaming/buffered_stream.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Small helper class to provide a small slice of a stream. - -This class reads ahead to detect if we are at the end of the stream. -""" - - -class BufferedStream(object): - """Buffers a stream, reading ahead to determine if we're at the end. - - :type stream: readable file-like object - :param stream: the stream to be buffered - - :type start: int - :param start: the starting point in the stream - - :type size: int - :param size: the size of the buffer - """ - def __init__(self, stream, start, size): - self._stream = stream - self._start_pos = start - self._buffer_pos = 0 - - if not hasattr(self._stream, 'closed') or not self._stream.closed: - self._buffered_data = self._stream.read(size) - else: - self._buffered_data = b'' - - self._stream_at_end = len(self._buffered_data) < size - self._end_pos = self._start_pos + len(self._buffered_data) - - def __repr__(self): - return ('Buffered stream %s from position %s-%s with %s ' - 'bytes remaining' % (self._stream, self._start_pos, - self._end_pos, self._bytes_remaining)) - - def __len__(self): - return len(self._buffered_data) - - @property - def stream_exhausted(self): - """Does the stream have bytes remaining beyond the buffer - - :rtype: bool - :returns: Boolean indicating if the stream is exhausted. - """ - return self._stream_at_end - - @property - def stream_end_position(self): - """Point to which stream was read into the buffer - - :rtype: int - :returns: The end-position of the stream. 
- """ - return self._end_pos - - @property - def _bytes_remaining(self): - """Bytes remaining to be read from the buffer - - :rtype: int - :returns: The number of bytes remaining. - """ - return len(self._buffered_data) - self._buffer_pos - - def read(self, size=None): - """Read bytes from the buffer. - - :type size: int - :param size: - (Optional) How many bytes to read (defaults to all remaining - bytes). - - :rtype: str - :returns: The data read from the stream. - """ - if size is None or size < 0: - raise ValueError( - 'Illegal read of size %s requested on BufferedStream. ' - 'Wrapped stream %s is at position %s-%s, ' - '%s bytes remaining.' % - (size, self._stream, self._start_pos, self._end_pos, - self._bytes_remaining)) - - if not self._bytes_remaining: - return b'' - - size = min(size, self._bytes_remaining) - data = self._buffered_data[self._buffer_pos:self._buffer_pos + size] - self._buffer_pos += size - return data diff --git a/core/google/cloud/streaming/exceptions.py b/core/google/cloud/streaming/exceptions.py deleted file mode 100644 index cfeb8f8fa41f..000000000000 --- a/core/google/cloud/streaming/exceptions.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Exceptions for generated client libraries.""" - - -class Error(Exception): - """Base class for all exceptions.""" - - -class CommunicationError(Error): - """Any communication error talking to an API server.""" - - -class HttpError(CommunicationError): - """Error making a request. Soon to be HttpError. - - :type response: dict - :param response: headers from the response which returned the error - - :type content: bytes - :param content: payload of the response which returned the error - - :type url: str - :param url: URL of the response which returned the error - """ - def __init__(self, response, content, url): - super(HttpError, self).__init__() - self.response = response - self.content = content - self.url = url - - def __str__(self): - content = self.content.decode('ascii', 'replace') - return 'HttpError accessing <%s>: response: <%s>, content <%s>' % ( - self.url, self.response, content) - - @property - def status_code(self): - """Status code for the response. - - :rtype: int - :returns: the code - """ - return int(self.response['status']) - - @classmethod - def from_response(cls, http_response): - """Factory: construct an exception from a response. - - :type http_response: :class:`~.http_wrapper.Response` - :param http_response: the response which returned the error - - :rtype: :class:`HttpError` - :returns: The error created from the response. 
- """ - return cls(http_response.info, http_response.content, - http_response.request_url) - - -class TransferError(CommunicationError): - """Errors related to transfers.""" - - -class TransferRetryError(TransferError): - """Retryable errors related to transfers.""" - - -class TransferInvalidError(TransferError): - """The given transfer is invalid.""" - - -class RequestError(CommunicationError): - """The request was not successful.""" - - -class RetryAfterError(HttpError): - """The response contained a retry-after header. - - :type response: dict - :param response: headers from the response which returned the error. - - :type content: bytes - :param content: payload of the response which returned the error. - - :type url: str - :param url: URL of the response which returned the error. - - :type retry_after: int - :param retry_after: seconds to wait before retrying. - """ - def __init__(self, response, content, url, retry_after): - super(RetryAfterError, self).__init__(response, content, url) - self.retry_after = int(retry_after) - - @classmethod - def from_response(cls, http_response): - """Factory: construct an exception from a response. - - :type http_response: :class:`~.http_wrapper.Response` - :param http_response: the response which returned the error. - - :rtype: :class:`RetryAfterError` - :returns: The error created from the response. - """ - return cls(http_response.info, http_response.content, - http_response.request_url, http_response.retry_after) - - -class BadStatusCodeError(HttpError): - """The request completed but returned a bad status code.""" diff --git a/core/google/cloud/streaming/http_wrapper.py b/core/google/cloud/streaming/http_wrapper.py deleted file mode 100644 index e80e105175e7..000000000000 --- a/core/google/cloud/streaming/http_wrapper.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""HTTP wrapper for apitools. - -This library wraps the underlying http library we use, which is -currently :mod:`httplib2`. -""" - -import collections -import contextlib -import logging -import socket -import time - -import httplib2 -import six -from six.moves import http_client -from six.moves.urllib import parse - -from google.cloud.streaming.exceptions import BadStatusCodeError -from google.cloud.streaming.exceptions import RequestError -from google.cloud.streaming.exceptions import RetryAfterError -from google.cloud.streaming.util import calculate_wait_for_retry - - -_REDIRECTIONS = 5 -# 308 and 429 don't have names in httplib. 
-RESUME_INCOMPLETE = 308 -TOO_MANY_REQUESTS = 429 - - -_REDIRECT_STATUS_CODES = ( - http_client.MOVED_PERMANENTLY, - http_client.FOUND, - http_client.SEE_OTHER, - http_client.TEMPORARY_REDIRECT, - RESUME_INCOMPLETE, -) - - -_RETRYABLE_EXCEPTIONS = ( - http_client.BadStatusLine, - http_client.IncompleteRead, - http_client.ResponseNotReady, - socket.error, - httplib2.ServerNotFoundError, - ValueError, - RequestError, - BadStatusCodeError, - RetryAfterError, -) - - -@contextlib.contextmanager -def _httplib2_debug_level(http_request, level, http=None): - """Temporarily change the value of httplib2.debuglevel, if necessary. - - If http_request has a `loggable_body` distinct from `body`, then we - need to prevent httplib2 from logging the full body. This sets - httplib2.debuglevel for the duration of the `with` block; however, - that alone won't change the value of existing HTTP connections. If - an httplib2.Http object is provided, we'll also change the level on - any cached connections attached to it. - - :type http_request: :class:`Request` - :param http_request: the request to be logged. - - :type level: int - :param level: the debuglevel for logging. - - :type http: :class:`httplib2.Http` - :param http: - (Optional) the instance on whose connections to set the debuglevel. - """ - if http_request.loggable_body is None: - yield - return - old_level = httplib2.debuglevel - http_levels = {} - httplib2.debuglevel = level - if http is not None and getattr(http, 'connections', None) is not None: - for connection_key, connection in http.connections.items(): - # httplib2 stores two kinds of values in this dict, connection - # classes and instances. Since the connection types are all - # old-style classes, we can't easily distinguish by connection - # type -- so instead we use the key pattern. - if ':' not in connection_key: - continue - http_levels[connection_key] = connection.debuglevel - connection.set_debuglevel(level) - yield - httplib2.debuglevel = old_level - if http is not None: - for connection_key, old_level in http_levels.items(): - http.connections[connection_key].set_debuglevel(old_level) - - -class Request(object): - """Encapsulates the data for an HTTP request. - - :type url: str - :param url: the URL for the request - - :type http_method: str - :param http_method: the HTTP method to use for the request - - :type headers: mapping - :param headers: (Optional) headers to be sent with the request - - :type body: str - :param body: body to be sent with the request - """ - def __init__(self, url='', http_method='GET', headers=None, body=''): - self.url = url - self.http_method = http_method - self.headers = headers or {} - self._body = None - self._loggable_body = None - self.body = body - - @property - def loggable_body(self): - """Request body for logging purposes - - :rtype: str - :returns: The body to be logged. - """ - return self._loggable_body - - @loggable_body.setter - def loggable_body(self, value): - """Update request body for logging purposes - - :type value: str - :param value: updated body - - :raises: :exc:`RequestError` if the request does not have a body. - """ - if self.body is None: - raise RequestError( - 'Cannot set loggable body on request with no body') - self._loggable_body = value - - @property - def body(self): - """Request body - - :rtype: str - :returns: The body of the request. - """ - return self._body - - @body.setter - def body(self, value): - """Update the request body - - Handles logging and length measurement. 
- - :type value: str - :param value: updated body - """ - self._body = value - if value is not None: - # Avoid calling len() which cannot exceed 4GiB in 32-bit python. - body_length = getattr( - self._body, 'length', None) or len(self._body) - self.headers['content-length'] = str(body_length) - else: - self.headers.pop('content-length', None) - # This line ensures we don't try to print large requests. - if not isinstance(value, (type(None), six.string_types)): - self.loggable_body = '' - - -def _process_content_range(content_range): - """Convert a 'Content-Range' header into a length for the response. - - Helper for :meth:`Response.length`. - - :type content_range: str - :param content_range: the header value being parsed. - - :rtype: int - :returns: the length of the response chunk. - """ - _, _, range_spec = content_range.partition(' ') - byte_range, _, _ = range_spec.partition('/') - start, _, end = byte_range.partition('-') - return int(end) - int(start) + 1 - - -# Note: currently the order of fields here is important, since we want -# to be able to pass in the result from httplib2.request. -_ResponseTuple = collections.namedtuple( - 'HttpResponse', ['info', 'content', 'request_url']) - - -class Response(_ResponseTuple): - """Encapsulates data for an HTTP response. - """ - __slots__ = () - - def __len__(self): - return self.length - - @property - def length(self): - """Length of this response. - - Exposed as an attribute since using ``len()`` directly can fail - for responses larger than ``sys.maxint``. - - :rtype: int or long - :returns: The length of the response. - """ - if 'content-encoding' in self.info and 'content-range' in self.info: - # httplib2 rewrites content-length in the case of a compressed - # transfer; we can't trust the content-length header in that - # case, but we *can* trust content-range, if it's present. - return _process_content_range(self.info['content-range']) - elif 'content-length' in self.info: - return int(self.info.get('content-length')) - elif 'content-range' in self.info: - return _process_content_range(self.info['content-range']) - return len(self.content) - - @property - def status_code(self): - """HTTP status code - - :rtype: int - :returns: The response status code. - """ - return int(self.info['status']) - - @property - def retry_after(self): - """Retry interval (if set). - - :rtype: int - :returns: interval in seconds - """ - if 'retry-after' in self.info: - return int(self.info['retry-after']) - - @property - def is_redirect(self): - """Does this response contain a redirect - - :rtype: bool - :returns: True if the status code indicates a redirect and the - 'location' header is present. - """ - return (self.status_code in _REDIRECT_STATUS_CODES and - 'location' in self.info) - - -def _check_response(response): - """Validate a response - - :type response: :class:`Response` - :param response: the response to validate - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if response - is None, :exc:`~.exceptions.BadStatusCodeError` if response status - code indicates an error, or :exc:`~.exceptions.RetryAfterError` - if response indicates a retry interval. - """ - if response is None: - # Caller shouldn't call us if the response is None, but handle anyway. 
- raise RequestError( - 'Request did not return a response.') - elif (response.status_code >= 500 or - response.status_code == TOO_MANY_REQUESTS): - raise BadStatusCodeError.from_response(response) - elif response.retry_after: - raise RetryAfterError.from_response(response) - - -def _reset_http_connections(http): - """Rebuild all http connections in the httplib2.Http instance. - - httplib2 overloads the map in http.connections to contain two different - types of values: - { scheme string: connection class } and - { scheme + authority string : actual http connection } - Here we remove all of the entries for actual connections so that on the - next request httplib2 will rebuild them from the connection types. - - :type http: :class:`httplib2.Http` - :param http: the instance whose connections are to be rebuilt - """ - if getattr(http, 'connections', None): - for conn_key in list(http.connections.keys()): - if ':' in conn_key: - del http.connections[conn_key] - - -def _make_api_request_no_retry(http, http_request, redirections=_REDIRECTIONS): - """Send an HTTP request via the given http instance. - - This wrapper exists to handle translation between the plain httplib2 - request/response types and the Request and Response types above. - - :type http: :class:`httplib2.Http` - :param http: an instance which impelements the `Http` API. - - :type http_request: :class:`Request` - :param http_request: the request to send. - - :type redirections: int - :param redirections: Number of redirects to follow. - - :rtype: :class:`Response` - :returns: an object representing the server's response - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no - response could be parsed. - """ - connection_type = None - # Handle overrides for connection types. This is used if the caller - # wants control over the underlying connection for managing callbacks - # or hash digestion. - if getattr(http, 'connections', None): - url_scheme = parse.urlsplit(http_request.url).scheme - if url_scheme and url_scheme in http.connections: - connection_type = http.connections[url_scheme] - - # Custom printing only at debuglevel 4 - new_debuglevel = 4 if httplib2.debuglevel == 4 else 0 - with _httplib2_debug_level(http_request, new_debuglevel, http=http): - info, content = http.request( - str(http_request.url), method=str(http_request.http_method), - body=http_request.body, headers=http_request.headers, - redirections=redirections, connection_type=connection_type) - - if info is None: - raise RequestError() - - response = Response(info, content, http_request.url) - _check_response(response) - return response - - -def make_api_request(http, http_request, retries=7, - redirections=_REDIRECTIONS): - """Send an HTTP request via the given http, performing error/retry handling. - - :type http: :class:`httplib2.Http` - :param http: an instance which implements the `Http` API. - - :type http_request: :class:`Request` - :param http_request: the request to send. - - :type retries: int - :param retries: Number of retries to attempt on retryable - responses (such as 429 or 5XX). - - :type redirections: int - :param redirections: Number of redirects to follow. - - :rtype: :class:`Response` - :returns: an object representing the server's response. - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no - response could be parsed. 
- """ - retry = 0 - while True: - try: - return _make_api_request_no_retry(http, http_request, - redirections=redirections) - except _RETRYABLE_EXCEPTIONS as exc: - retry += 1 - if retry >= retries: - raise - retry_after = getattr(exc, 'retry_after', None) - if retry_after is None: - retry_after = calculate_wait_for_retry(retry) - - _reset_http_connections(http) - logging.debug('Retrying request to url %s after exception %s', - http_request.url, type(exc).__name__) - time.sleep(retry_after) diff --git a/core/google/cloud/streaming/stream_slice.py b/core/google/cloud/streaming/stream_slice.py deleted file mode 100644 index 3a13337bb993..000000000000 --- a/core/google/cloud/streaming/stream_slice.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Small helper class to provide a small slice of a stream.""" - -from six.moves import http_client - - -class StreamSlice(object): - """Provides a slice-like object for streams. - - :type stream: readable file-like object - :param stream: the stream to be buffered. - - :type max_bytes: int - :param max_bytes: maximum number of bytes to return in the slice. - """ - def __init__(self, stream, max_bytes): - self._stream = stream - self._remaining_bytes = max_bytes - self._max_bytes = max_bytes - - def __repr__(self): - return 'Slice of stream %s with %s/%s bytes not yet read' % ( - self._stream, self._remaining_bytes, self._max_bytes) - - def __len__(self): - return self._max_bytes - - def __nonzero__(self): - # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid - # accidental len() calls from httplib in the form of "if this_object:". - return bool(self._max_bytes) - - @property - def length(self): - """Maximum number of bytes to return in the slice. - - .. note:: - - For 32-bit python2.x, len() cannot exceed a 32-bit number. - - :rtype: int - :returns: The max "length" of the stream. - """ - return self._max_bytes - - def read(self, size=None): - """Read bytes from the slice. - - Compared to other streams, there is one case where we may - unexpectedly raise an exception on read: if the underlying stream - is exhausted (i.e. returns no bytes on read), and the size of this - slice indicates we should still be able to read more bytes, we - raise :exc:`IncompleteRead`. - - :type size: int - :param size: - (Optional) If provided, read no more than size bytes from the - stream. - - :rtype: bytes - :returns: bytes read from this slice. 
- - :raises: :exc:`IncompleteRead` - """ - if size is not None: - read_size = min(size, self._remaining_bytes) - else: - read_size = self._remaining_bytes - data = self._stream.read(read_size) - if read_size > 0 and not data: - raise http_client.IncompleteRead( - self._max_bytes - self._remaining_bytes, self._max_bytes) - self._remaining_bytes -= len(data) - return data diff --git a/core/google/cloud/streaming/transfer.py b/core/google/cloud/streaming/transfer.py deleted file mode 100644 index 3d6d5b8e6016..000000000000 --- a/core/google/cloud/streaming/transfer.py +++ /dev/null @@ -1,1223 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# pylint: disable=too-many-lines - -"""Upload and download support for apitools.""" - -import email.generator as email_generator -import email.mime.multipart as mime_multipart -import email.mime.nonmultipart as mime_nonmultipart -import mimetypes -import os - -import httplib2 -import six -from six.moves import http_client - -from google.cloud._helpers import _to_bytes -from google.cloud.streaming.buffered_stream import BufferedStream -from google.cloud.streaming.exceptions import CommunicationError -from google.cloud.streaming.exceptions import HttpError -from google.cloud.streaming.exceptions import TransferInvalidError -from google.cloud.streaming.exceptions import TransferRetryError -from google.cloud.streaming.http_wrapper import make_api_request -from google.cloud.streaming.http_wrapper import Request -from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE -from google.cloud.streaming.stream_slice import StreamSlice -from google.cloud.streaming.util import acceptable_mime_type - - -RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 -SIMPLE_UPLOAD = 'simple' -RESUMABLE_UPLOAD = 'resumable' - - -_DEFAULT_CHUNKSIZE = 1 << 20 - - -class _Transfer(object): - """Generic bits common to Uploads and Downloads. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type close_stream: bool - :param close_stream: should this instance close the stream when deleted - - :type chunksize: int - :param chunksize: the size of chunks used to download/upload a file. - - :type auto_transfer: bool - :param auto_transfer: should this instance automatically begin transferring - data when initialized - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance used to perform requests. - - :type num_retries: int - :param num_retries: how many retries should the transfer attempt - """ - - _num_retries = None - - def __init__(self, stream, close_stream=False, - chunksize=_DEFAULT_CHUNKSIZE, auto_transfer=True, - http=None, num_retries=5): - self._bytes_http = None - self._close_stream = close_stream - self._http = http - self._stream = stream - self._url = None - - # Let the @property do validation.
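`StreamSlice` (removed just above) caps reads at `max_bytes` and raises `IncompleteRead` if the wrapped stream runs dry before the slice is satisfied. A compact stand-in with the same read behavior:

import io

from six.moves import http_client

class TinySlice(object):
    # Same bookkeeping as StreamSlice.read above, minus the len() helpers.
    def __init__(self, stream, max_bytes):
        self._stream = stream
        self._remaining_bytes = max_bytes
        self._max_bytes = max_bytes

    def read(self, size=None):
        if size is not None:
            read_size = min(size, self._remaining_bytes)
        else:
            read_size = self._remaining_bytes
        data = self._stream.read(read_size)
        if read_size > 0 and not data:
            raise http_client.IncompleteRead(
                self._max_bytes - self._remaining_bytes, self._max_bytes)
        self._remaining_bytes -= len(data)
        return data

assert TinySlice(io.BytesIO(b'abcdef'), 4).read() == b'abcd'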
- self.num_retries = num_retries - - self.auto_transfer = auto_transfer - self.chunksize = chunksize - - def __repr__(self): - return str(self) - - @property - def close_stream(self): - """Should this instance close the stream when deleted. - - :rtype: bool - :returns: Boolean indicating if the stream should be closed. - """ - return self._close_stream - - @property - def http(self): - """Http instance used to perform requests. - - :rtype: :class:`httplib2.Http` (or workalike) - :returns: The HTTP object used for requests. - """ - return self._http - - @property - def bytes_http(self): - """Http instance used to perform binary requests. - - Defaults to :attr:`http`. - - :rtype: :class:`httplib2.Http` (or workalike) - :returns: The HTTP object used for binary requests. - """ - return self._bytes_http or self.http - - @bytes_http.setter - def bytes_http(self, value): - """Update Http instance used to perform binary requests. - - :type value: :class:`httplib2.Http` (or workalike) - :param value: new instance - """ - self._bytes_http = value - - @property - def num_retries(self): - """How many retries should the transfer attempt - - :rtype: int - :returns: The number of retries allowed. - """ - return self._num_retries - - @num_retries.setter - def num_retries(self, value): - """Update how many retries the transfer should attempt - - :type value: int - """ - if not isinstance(value, six.integer_types): - raise ValueError("num_retries: pass an integer") - - if value < 0: - raise ValueError( - 'Cannot have negative value for num_retries') - self._num_retries = value - - @property - def stream(self): - """Stream to/from which data is downloaded/uploaded. - - :rtype: file-like object - :returns: The stream that sends/receives data. - """ - return self._stream - - @property - def url(self): - """URL to / from which data is downloaded/uploaded. - - :rtype: str - :returns: The URL where data is sent/received. - """ - return self._url - - def _initialize(self, http, url): - """Initialize this download by setting :attr:`http` and :attr:`url`. - - Allow the user to be able to pre-initialize :attr:`http` by setting - the value in the constructor; in that case, we ignore the provided - http. - - :type http: :class:`httplib2.Http` (or a workalike) or None. - :param http: the Http instance to use to make requests. - - :type url: str - :param url: The url for this transfer. - """ - self._ensure_uninitialized() - if self.http is None: - self._http = http or httplib2.Http() - self._url = url - - @property - def initialized(self): - """Has the instance been initialized? - - :rtype: bool - :returns: Boolean indicating if the current transfer - has been initialized. - """ - return self.url is not None and self.http is not None - - def _ensure_initialized(self): - """Helper: assert that the instance is initialized. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - if the instance is not initialized. - """ - if not self.initialized: - raise TransferInvalidError( - 'Cannot use uninitialized %s' % (type(self).__name__,)) - - def _ensure_uninitialized(self): - """Helper: assert that the instance is not initialized. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - if the instance is already initialized. - """ - if self.initialized: - raise TransferInvalidError( - 'Cannot re-initialize %s' % (type(self).__name__,)) - - def __del__(self): - if self._close_stream: - self._stream.close() - - -class Download(_Transfer): - """Represent a single download.
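`num_retries` above is the validating-setter pattern: the constructor assigns through the property so a single code path enforces the type and range checks. The shape of that pattern in isolation (illustrative class, not part of the module):

class Retrying(object):
    # Mirrors how _Transfer routes __init__ through the num_retries property.
    def __init__(self, num_retries=5):
        self._num_retries = None
        self.num_retries = num_retries  # setter validates

    @property
    def num_retries(self):
        return self._num_retries

    @num_retries.setter
    def num_retries(self, value):
        if not isinstance(value, int) or value < 0:
            raise ValueError('num_retries must be a non-negative integer')
        self._num_retries = value

assert Retrying(3).num_retries == 3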
- - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type kwds: dict - :param kwds: keyword arguments: all except ``total_size`` are passed - through to :meth:`_Transfer.__init__()`. - """ - _ACCEPTABLE_STATUSES = set(( - http_client.OK, - http_client.NO_CONTENT, - http_client.PARTIAL_CONTENT, - http_client.REQUESTED_RANGE_NOT_SATISFIABLE, - )) - - def __init__(self, stream, **kwds): - total_size = kwds.pop('total_size', None) - super(Download, self).__init__(stream, **kwds) - self._initial_response = None - self._progress = 0 - self._total_size = total_size - self._encoding = None - - @classmethod - def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): - """Create a new download object from a filename. - - :type filename: str - :param filename: path/filename for the target file - - :type overwrite: bool - :param overwrite: should an existing file be overwritten - - :type auto_transfer: bool - :param auto_transfer: should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Download` - :returns: The download initiated from the file passed. - """ - path = os.path.expanduser(filename) - if os.path.exists(path) and not overwrite: - raise ValueError( - 'File %s exists and overwrite not specified' % path) - return cls(open(path, 'wb'), close_stream=True, - auto_transfer=auto_transfer, **kwds) - - @classmethod - def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): - """Create a new Download object from a stream. - - :type stream: writable file-like object - :param stream: the target file - - :type total_size: int - :param total_size: (Optional) total size of the file to be downloaded - - :type auto_transfer: bool - :param auto_transfer: should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Download` - :returns: The download initiated from the stream passed. - """ - return cls(stream, auto_transfer=auto_transfer, total_size=total_size, - **kwds) - - @property - def progress(self): - """Number of bytes that have been downloaded. - - :rtype: int >= 0 - :returns: The number of downloaded bytes. - """ - return self._progress - - @property - def total_size(self): - """Total number of bytes to be downloaded. - - :rtype: int or None - :returns: The total number of bytes to download. - """ - return self._total_size - - @property - def encoding(self): - """'Content-Encoding' used to transfer the file - - :rtype: str or None - :returns: The encoding of the downloaded content. - """ - return self._encoding - - def __repr__(self): - if not self.initialized: - return 'Download (uninitialized)' - else: - return 'Download with %d/%s bytes transferred from url %s' % ( - self.progress, self.total_size, self.url) - - def configure_request(self, http_request, url_builder): - """Update http_request/url_builder with download-appropriate values. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be updated - - :type url_builder: instance with settable 'query_params' attribute. - :param url_builder: transfer policy object to be updated - """ - url_builder.query_params['alt'] = 'media' - http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,) - - def _set_total(self, info): - """Update 'total_size' based on data from a response.
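For reference, the `_ACCEPTABLE_STATUSES` whitelist above maps to these numeric codes:

from six.moves import http_client

assert http_client.OK == 200
assert http_client.NO_CONTENT == 204
assert http_client.PARTIAL_CONTENT == 206
assert http_client.REQUESTED_RANGE_NOT_SATISFIABLE == 416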
- - :type info: mapping - :param info: response headers - """ - if 'content-range' in info: - _, _, total = info['content-range'].rpartition('/') - if total != '*': - self._total_size = int(total) - # Note "total_size is None" means we don't know it; if no size - # info was returned on our initial range request, that means we - # have a 0-byte file. (That last statement has been verified - # empirically, but is not clearly documented anywhere.) - if self.total_size is None: - self._total_size = 0 - - def initialize_download(self, http_request, http): - """Initialize this download. - - If the instance has :attr:`auto_transfer` enabled, begins the - download immediately. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to use to initialize this download. - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance for this request. - """ - self._ensure_uninitialized() - url = http_request.url - if self.auto_transfer: - end_byte = self._compute_end_byte(0) - self._set_range_header(http_request, 0, end_byte) - response = make_api_request( - self.bytes_http or http, http_request) - if response.status_code not in self._ACCEPTABLE_STATUSES: - raise HttpError.from_response(response) - self._initial_response = response - self._set_total(response.info) - url = response.info.get('content-location', response.request_url) - self._initialize(http, url) - # Unless the user has requested otherwise, we want to just - # go ahead and pump the bytes now. - if self.auto_transfer: - self.stream_file(use_chunks=True, headers=http_request.headers) - - def _normalize_start_end(self, start, end=None): - """Validate / fix up byte range. - - :type start: int - :param start: start byte of the range: if negative, used as an - offset from the end. - - :type end: int - :param end: end byte of the range. - - :rtype: tuple, (start, end) - :returns: the normalized start, end pair. - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - for invalid combinations of start, end. - """ - if end is not None: - if start < 0: - raise TransferInvalidError( - 'Cannot have end index with negative start index') - elif start >= self.total_size: - raise TransferInvalidError( - 'Cannot have start index greater than total size') - end = min(end, self.total_size - 1) - if end < start: - raise TransferInvalidError( - 'Range requested with end[%s] < start[%s]' % (end, start)) - return start, end - else: - if start < 0: - start = max(0, start + self.total_size) - return start, self.total_size - 1 - - @staticmethod - def _set_range_header(request, start, end=None): - """Update the 'Range' header in a request to match a byte range. - - :type request: :class:`google.cloud.streaming.http_wrapper.Request` - :param request: the request to update - - :type start: int - :param start: start byte of the range: if negative, used as an - offset from the end. - - :type end: int - :param end: end byte of the range. - """ - if start < 0: - request.headers['range'] = 'bytes=%d' % start - elif end is None: - request.headers['range'] = 'bytes=%d-' % start - else: - request.headers['range'] = 'bytes=%d-%d' % (start, end) - - def _compute_end_byte(self, start, end=None, use_chunks=True): - """Compute the last byte to fetch for this request. - - Based on the HTTP spec for Range and Content-Range. - - .. note:: - This is potentially confusing in several ways: - - the value for the last byte is 0-based, eg "fetch 10 bytes - from the beginning" would return 9 here. 
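`_normalize_start_end` and `_set_range_header` above implement the three RFC 7233 range forms. A self-contained sketch of both, assuming a known 100-byte total size:

def normalize(start, end=None, total=100):
    # Simplified _normalize_start_end for total_size == 100.
    if end is not None:
        return start, min(end, total - 1)
    if start < 0:
        start = max(0, start + total)
    return start, total - 1

def range_header(start, end=None):
    # The three header shapes produced by _set_range_header.
    if start < 0:
        return 'bytes=%d' % start       # suffix: last -start bytes
    elif end is None:
        return 'bytes=%d-' % start      # open-ended
    return 'bytes=%d-%d' % (start, end)

assert normalize(-20) == (80, 99)
assert range_header(-20) == 'bytes=-20'
assert range_header(0, 9) == 'bytes=0-9'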
- - if we have no information about size, and don't want to - use the chunksize, we'll return None. - - :type start: int - :param start: start byte of the range. - - :type end: int - :param end: (Optional) suggested last byte of the range. - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize`. - - :rtype: int - :returns: Last byte to use in a 'Range' header, or None. - """ - end_byte = end - - if start < 0 and not self.total_size: - return end_byte - - if use_chunks: - alternate = start + self.chunksize - 1 - if end_byte is not None: - end_byte = min(end_byte, alternate) - else: - end_byte = alternate - - if self.total_size: - alternate = self.total_size - 1 - if end_byte is not None: - end_byte = min(end_byte, alternate) - else: - end_byte = alternate - - return end_byte - - def _get_chunk(self, start, end, headers=None): - """Retrieve a chunk of the file. - - :type start: int - :param start: start byte of the range. - - :type end: int - :param end: (Optional) end byte of the range. - - :type headers: dict - :param headers: (Optional) Headers to be used for the ``Request``. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: response from the chunk request. - """ - self._ensure_initialized() - request = Request(url=self.url, headers=headers) - self._set_range_header(request, start, end=end) - return make_api_request( - self.bytes_http, request, retries=self.num_retries) - - def _process_response(self, response): - """Update attributes and write to the stream, based on response. - - :type response: :class:`google.cloud.streaming.http_wrapper.Response` - :param response: response from a download request. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: the response - :raises: :exc:`google.cloud.streaming.exceptions.HttpError` for - missing / unauthorized responses; - :exc:`google.cloud.streaming.exceptions.TransferRetryError` - for other error responses. - """ - if response.status_code not in self._ACCEPTABLE_STATUSES: - # We distinguish errors that mean we made a mistake in setting - # up the transfer versus something we should attempt again. - if response.status_code in (http_client.FORBIDDEN, - http_client.NOT_FOUND): - raise HttpError.from_response(response) - else: - raise TransferRetryError(response.content) - if response.status_code in (http_client.OK, - http_client.PARTIAL_CONTENT): - self.stream.write(response.content) - self._progress += response.length - if response.info and 'content-encoding' in response.info: - self._encoding = response.info['content-encoding'] - elif response.status_code == http_client.NO_CONTENT: - # It's important to write something to the stream for the case - # of a 0-byte download to a file, as otherwise python won't - # create the file. - self.stream.write('') - return response - - def get_range(self, start, end=None, use_chunks=True): - """Retrieve a given byte range from this download, inclusive. - - Writes retrieved bytes into :attr:`stream`. - - Range must be of one of these three forms: - * 0 <= start, end = None: Fetch from start to the end of the file. - * 0 <= start <= end: Fetch the bytes from start to end. - * start < 0, end = None: Fetch the last -start bytes of the file. - - (These variations correspond to those described in the HTTP 1.1 - protocol for range headers in RFC 2616, sec. 14.35.1.) - - :type start: int - :param start: Where to start fetching bytes. (See above.) - - :type end: int - :param end: (Optional) Where to stop fetching bytes. (See above.)
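A worked example of `_compute_end_byte` above, with the default 1 MiB chunksize shrunk to 100 bytes for readability and a 250-byte object: each request is clamped both to one chunk and to the end of the file.

def end_byte(start, chunksize=100, total_size=250):
    # Simplified _compute_end_byte: use_chunks=True, no suggested end.
    end = start + chunksize - 1        # one chunk, 0-based inclusive
    return min(end, total_size - 1)    # never past EOF

assert end_byte(0) == 99
assert end_byte(100) == 199
assert end_byte(200) == 249           # final, short chunk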
- - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize` - and fetch this range in a single request. - If True, streams via chunks. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferRetryError` - if a request returns an empty response. - """ - self._ensure_initialized() - progress_end_normalized = False - if self.total_size is not None: - progress, end_byte = self._normalize_start_end(start, end) - progress_end_normalized = True - else: - progress = start - end_byte = end - while (not progress_end_normalized or end_byte is None or - progress <= end_byte): - end_byte = self._compute_end_byte(progress, end=end_byte, - use_chunks=use_chunks) - response = self._get_chunk(progress, end_byte) - if not progress_end_normalized: - self._set_total(response.info) - progress, end_byte = self._normalize_start_end(start, end) - progress_end_normalized = True - response = self._process_response(response) - progress += response.length - if response.length == 0: - raise TransferRetryError( - 'Zero bytes unexpectedly returned in download response') - - def stream_file(self, use_chunks=True, headers=None): - """Stream the entire download. - - Writes retrieved bytes into :attr:`stream`. - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize` - and stream this download in a single request. - If True, streams via chunks. - - :type headers: dict - :param headers: (Optional) Headers to be used for the ``Request``. - """ - self._ensure_initialized() - while True: - if self._initial_response is not None: - response = self._initial_response - self._initial_response = None - else: - end_byte = self._compute_end_byte(self.progress, - use_chunks=use_chunks) - response = self._get_chunk(self.progress, end_byte, - headers=headers) - if self.total_size is None: - self._set_total(response.info) - response = self._process_response(response) - if (response.status_code == http_client.OK or - self.progress >= self.total_size): - break - - -class Upload(_Transfer): - """Represent a single Upload. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type mime_type: str - :param mime_type: MIME type of the upload. - - :type total_size: int - :param total_size: (Optional) Total upload size for the stream. - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance used to perform requests. - - :type close_stream: bool - :param close_stream: should this instance close the stream when deleted - - :type auto_transfer: bool - :param auto_transfer: should this instance automatically begin transferring - data when initialized - - :type kwds: dict - :param kwds: keyword arguments: all except ``total_size`` are passed - through to :meth:`_Transfer.__init__()`. - """ - _REQUIRED_SERIALIZATION_KEYS = set(( - 'auto_transfer', 'mime_type', 'total_size', 'url')) - - def __init__(self, stream, mime_type, total_size=None, http=None, - close_stream=False, auto_transfer=True, - **kwds): - super(Upload, self).__init__( - stream, close_stream=close_stream, auto_transfer=auto_transfer, - http=http, **kwds) - self._final_response = None - self._server_chunk_granularity = None - self._complete = False - self._mime_type = mime_type - self._progress = 0 - self._strategy = None - self._total_size = total_size - - @classmethod - def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): - """Create a new Upload object from a filename.
- - :type filename: str - :param filename: path/filename to the file being uploaded - - :type mime_type: str - :param mime_type: MIMEtype of the file being uploaded - - :type auto_transfer: bool - :param auto_transfer: - (Optional) should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Upload` - :returns: The upload initiated from the file passed. - """ - path = os.path.expanduser(filename) - if not mime_type: - mime_type, _ = mimetypes.guess_type(path) - if mime_type is None: - raise ValueError( - 'Could not guess mime type for %s' % path) - size = os.stat(path).st_size - return cls(open(path, 'rb'), mime_type, total_size=size, - close_stream=True, auto_transfer=auto_transfer, **kwds) - - @classmethod - def from_stream(cls, stream, mime_type, - total_size=None, auto_transfer=True, **kwds): - """Create a new Upload object from a stream. - - :type stream: readable file-like object - :param stream: the source file - - :type mime_type: str - :param mime_type: MIMEtype of the file being uploaded - - :type total_size: int - :param total_size: (Optional) Size of the file being uploaded - - :type auto_transfer: bool - :param auto_transfer: - (Optional) should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Upload` - :returns: The upload initiated from the stream passed. - """ - if mime_type is None: - raise ValueError( - 'No mime_type specified for stream') - return cls(stream, mime_type, total_size=total_size, - close_stream=False, auto_transfer=auto_transfer, **kwds) - - @property - def complete(self): - """Has the entire stream been uploaded? - - :rtype: bool - :returns: Boolean indicating if the upload is complete. - """ - return self._complete - - @property - def mime_type(self): - """MIMEtype of the file being uploaded. - - :rtype: str - :returns: The mime-type of the upload. - """ - return self._mime_type - - @property - def progress(self): - """Bytes uploaded so far - - :rtype: int - :returns: The amount uploaded so far. - """ - return self._progress - - @property - def strategy(self): - """Upload strategy to use - - :rtype: str or None - :returns: The strategy used to upload the data. - """ - return self._strategy - - @strategy.setter - def strategy(self, value): - """Update upload strategy to use - - :type value: str (one of :data:`SIMPLE_UPLOAD` or - :data:`RESUMABLE_UPLOAD`) - - :raises: :exc:`ValueError` if value is not one of the two allowed - strings. - """ - if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD): - raise ValueError(( - 'Invalid value "%s" for upload strategy, must be one of ' - '"simple" or "resumable".') % value) - self._strategy = value - - @property - def total_size(self): - """Total size of the stream to be uploaded. - - :rtype: int or None - :returns: The total size to be uploaded. - """ - return self._total_size - - @total_size.setter - def total_size(self, value): - """Update total size of the stream to be uploaded.
- - :type value: int - :param value: (Optional) the size - """ - self._ensure_uninitialized() - self._total_size = value - - def __repr__(self): - if not self.initialized: - return 'Upload (uninitialized)' - else: - return 'Upload with %d/%s bytes transferred for url %s' % ( - self.progress, self.total_size or '???', self.url) - - def _set_default_strategy(self, upload_config, http_request): - """Determine and set the default upload strategy for this upload. - - We generally prefer simple or multipart, unless we're forced to - use resumable. This happens when any of (1) the upload is too - large, (2) the simple endpoint doesn't support multipart requests - and we have metadata, or (3) there is no simple upload endpoint. - - :type upload_config: instance w/ ``max_size`` and ``accept`` - attributes - :param upload_config: Configuration for the upload endpoint. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: The associated http request. - """ - if upload_config.resumable_path is None: - self.strategy = SIMPLE_UPLOAD - if self.strategy is not None: - return - strategy = SIMPLE_UPLOAD - if (self.total_size is not None and - self.total_size > RESUMABLE_UPLOAD_THRESHOLD): - strategy = RESUMABLE_UPLOAD - if http_request.body and not upload_config.simple_multipart: - strategy = RESUMABLE_UPLOAD - if not upload_config.simple_path: - strategy = RESUMABLE_UPLOAD - self.strategy = strategy - - def configure_request(self, upload_config, http_request, url_builder): - """Configure the request and url for this upload. - - :type upload_config: instance w/ ``max_size`` and ``accept`` - attributes - :param upload_config: transfer policy object to be queried - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be updated - - :type url_builder: instance with settable 'relative_path' and - 'query_params' attributes. - :param url_builder: transfer policy object to be updated - - :raises: :exc:`ValueError` if the requested upload is too big, - or does not have an acceptable MIME type. - """ - # Validate total_size vs. max_size - if (self.total_size and upload_config.max_size and - self.total_size > upload_config.max_size): - raise ValueError( - 'Upload too big: %s larger than max size %s' % ( - self.total_size, upload_config.max_size)) - # Validate mime type - if not acceptable_mime_type(upload_config.accept, self.mime_type): - raise ValueError( - 'MIME type %s does not match any accepted MIME ranges %s' % ( - self.mime_type, upload_config.accept)) - - self._set_default_strategy(upload_config, http_request) - if self.strategy == SIMPLE_UPLOAD: - url_builder.relative_path = upload_config.simple_path - if http_request.body: - url_builder.query_params['uploadType'] = 'multipart' - self._configure_multipart_request(http_request) - else: - url_builder.query_params['uploadType'] = 'media' - self._configure_media_request(http_request) - else: - url_builder.relative_path = upload_config.resumable_path - url_builder.query_params['uploadType'] = 'resumable' - self._configure_resumable_request(http_request) - - def _configure_media_request(self, http_request): - """Helper for 'configure_request': set up simple request.""" - http_request.headers['content-type'] = self.mime_type - http_request.body = self.stream.read() - http_request.loggable_body = '<media body>' - - def _configure_multipart_request(self, http_request): - """Helper for 'configure_request': set up multipart request.""" - # This is a multipart/related upload.
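`_set_default_strategy` above prefers the simple endpoint and falls back to resumable in three cases: the payload exceeds the 5 MiB threshold (`5 << 20` bytes), metadata is present but multipart is unsupported, or there is no simple path. The decision condensed into a sketch (the real method also honors a pre-set strategy and a missing resumable path):

def pick_strategy(total_size, has_metadata,
                  simple_multipart=True, simple_path='/upload'):
    if total_size is not None and total_size > (5 << 20):
        return 'resumable'
    if has_metadata and not simple_multipart:
        return 'resumable'
    if not simple_path:
        return 'resumable'
    return 'simple'

assert pick_strategy(1024, has_metadata=False) == 'simple'
assert pick_strategy(10 << 20, has_metadata=False) == 'resumable'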
- msg_root = mime_multipart.MIMEMultipart('related') - # msg_root should not write out its own headers - setattr(msg_root, '_write_headers', lambda self: None) - - # attach the body as one part - msg = mime_nonmultipart.MIMENonMultipart( - *http_request.headers['content-type'].split('/')) - msg.set_payload(http_request.body) - msg_root.attach(msg) - - # attach the media as the second part - msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/')) - msg['Content-Transfer-Encoding'] = 'binary' - msg.set_payload(self.stream.read()) - msg_root.attach(msg) - - # NOTE: generate multipart message as bytes, not text - stream = six.BytesIO() - if six.PY3: # pragma: NO COVER Python3 - generator_class = email_generator.BytesGenerator - else: - generator_class = email_generator.Generator - generator = generator_class(stream, mangle_from_=False) - generator.flatten(msg_root, unixfrom=False) - http_request.body = stream.getvalue() - - multipart_boundary = msg_root.get_boundary() - http_request.headers['content-type'] = ( - 'multipart/related; boundary="%s"' % multipart_boundary) - - boundary_bytes = _to_bytes(multipart_boundary) - body_components = http_request.body.split(boundary_bytes) - headers, _, _ = body_components[-2].partition(b'\n\n') - body_components[-2] = b'\n\n'.join([headers, b'<media body>\n\n--']) - http_request.loggable_body = boundary_bytes.join(body_components) - - def _configure_resumable_request(self, http_request): - """Helper for 'configure_request': set up resumable request.""" - http_request.headers['X-Upload-Content-Type'] = self.mime_type - if self.total_size is not None: - http_request.headers[ - 'X-Upload-Content-Length'] = str(self.total_size) - - def refresh_upload_state(self): - """Refresh the state of a resumable upload via query to the back-end. - """ - if self.strategy != RESUMABLE_UPLOAD: - return - self._ensure_initialized() - # NOTE: Per RFC 2616[1]/7231[2], a 'PUT' request is inappropriate - # here: it is intended to be used to replace the entire - # resource, not to query for a status. - # - # If the back-end doesn't provide a way to query for this state - # via a 'GET' request, somebody should be spanked. - # - # The violation is documented[3]. - # - # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6 - # [2] http://tools.ietf.org/html/rfc7231#section-4.3.4 - # [3] - # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload - refresh_request = Request( - url=self.url, http_method='PUT', - headers={'Content-Range': 'bytes */*'}) - refresh_response = make_api_request( - self.http, refresh_request, redirections=0, - retries=self.num_retries) - range_header = self._get_range_header(refresh_response) - if refresh_response.status_code in (http_client.OK, - http_client.CREATED): - self._complete = True - self._progress = self.total_size - self.stream.seek(self.progress) - # If we're finished, the refresh response will contain the metadata - # originally requested. Cache it so it can be returned in - # StreamInChunks. - self._final_response = refresh_response - elif refresh_response.status_code == RESUME_INCOMPLETE: - if range_header is None: - self._progress = 0 - else: - self._progress = self._last_byte(range_header) + 1 - self.stream.seek(self.progress) - else: - raise HttpError.from_response(refresh_response) - - @staticmethod - def _get_range_header(response): - """Return a 'Range' header from a response.
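The multipart body built above can be reproduced standalone: a metadata part plus a media part inside `multipart/related`, flattened to bytes with headers suppressed on the root. A Python 3 sketch with illustrative payloads:

import email.generator as email_generator
import email.mime.multipart as mime_multipart
import email.mime.nonmultipart as mime_nonmultipart
import io

root = mime_multipart.MIMEMultipart('related')
# Suppress the root's own headers; the HTTP request carries them instead.
setattr(root, '_write_headers', lambda gen: None)

meta = mime_nonmultipart.MIMENonMultipart('application', 'json')
meta.set_payload('{"name": "example.txt"}')
root.attach(meta)

media = mime_nonmultipart.MIMENonMultipart('text', 'plain')
media['Content-Transfer-Encoding'] = 'binary'
media.set_payload('hello world')
root.attach(media)

buf = io.BytesIO()
email_generator.BytesGenerator(buf, mangle_from_=False).flatten(
    root, unixfrom=False)
body = buf.getvalue()
# flatten() assigns the boundary, so it can be read back afterwards.
content_type = 'multipart/related; boundary="%s"' % root.get_boundary()
assert root.get_boundary().encode('ascii') in body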
- - :type response: :class:`google.cloud.streaming.http_wrapper.Response` - :param response: response to be queried - - :rtype: str - :returns: The header used to determine the bytes range. - """ - # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header, - # not a response header. If the back-end is actually setting - # 'Range' on responses, somebody should be spanked: it should - # be sending 'Content-Range' (including the '<start>-<end>/<length>' - # trailer). - # - # The violation is documented[4]. - # - # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html - # [2] http://tools.ietf.org/html/rfc7233#section-3.1 - # [3] http://tools.ietf.org/html/rfc7233#section-4.2 - # [4] - # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#chunking - return response.info.get('Range', response.info.get('range')) - - def initialize_upload(self, http_request, http): - """Initialize this upload from the given http_request. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be used - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance for this request. - - :raises: :exc:`ValueError` if the instance has not been configured - with a strategy. - :rtype: :class:`~google.cloud.streaming.http_wrapper.Response` - :returns: The response if the upload is resumable and auto transfer - is not used. - """ - if self.strategy is None: - raise ValueError( - 'No upload strategy set; did you call configure_request?') - if self.strategy != RESUMABLE_UPLOAD: - return - self._ensure_uninitialized() - http_response = make_api_request(http, http_request, - retries=self.num_retries) - if http_response.status_code != http_client.OK: - raise HttpError.from_response(http_response) - - granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity') - if granularity is not None: - granularity = int(granularity) - self._server_chunk_granularity = granularity - url = http_response.info['location'] - self._initialize(http, url) - - # Unless the user has requested otherwise, we want to just - # go ahead and pump the bytes now. - if self.auto_transfer: - return self.stream_file(use_chunks=True) - else: - return http_response - - @staticmethod - def _last_byte(range_header): - """Parse the last byte from a 'Range' header. - - :type range_header: str - :param range_header: 'Range' header value per RFC 2616/7233 - - :rtype: int - :returns: The last byte from a range header. - """ - _, _, end = range_header.partition('-') - return int(end) - - def _validate_chunksize(self, chunksize=None): - """Validate chunksize against server-specified granularity. - - Helper for :meth:`stream_file`. - - :type chunksize: int - :param chunksize: (Optional) the chunk size to be tested. - - :raises: :exc:`ValueError` if ``chunksize`` is not a multiple - of the server-specified granularity. - """ - if self._server_chunk_granularity is None: - return - chunksize = chunksize or self.chunksize - if chunksize % self._server_chunk_granularity: - raise ValueError( - 'Server requires chunksize to be a multiple of %d' % - self._server_chunk_granularity) - - def stream_file(self, use_chunks=True): - """Upload the stream. - - :type use_chunks: bool - :param use_chunks: If False, send the stream in a single request. - Otherwise, send it in chunks. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response for the final request made.
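`refresh_upload_state` and `_last_byte` above recover progress from the server's (nonstandard) `Range` response header. The resume arithmetic in isolation:

def last_byte(range_header):
    # Same parsing as _last_byte: 'bytes=0-12345' -> 12345.
    _, _, end = range_header.partition('-')
    return int(end)

assert last_byte('bytes=0-12345') == 12345
progress = last_byte('bytes=0-12345') + 1  # next byte to send
assert progress == 12346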
- """ - if self.strategy != RESUMABLE_UPLOAD: - raise ValueError( - 'Cannot stream non-resumable upload') - # final_response is set if we resumed an already-completed upload. - response = self._final_response - send_func = self._send_chunk if use_chunks else self._send_media_body - if use_chunks: - self._validate_chunksize(self.chunksize) - self._ensure_initialized() - while not self.complete: - response = send_func(self.stream.tell()) - if response.status_code in (http_client.OK, http_client.CREATED): - self._complete = True - break - self._progress = self._last_byte(response.info['range']) - if self.progress + 1 != self.stream.tell(): - raise CommunicationError( - 'Failed to transfer all bytes in chunk, upload paused at ' - 'byte %d' % self.progress) - if self.complete and hasattr(self.stream, 'seek'): - if not hasattr(self.stream, 'seekable') or self.stream.seekable(): - current_pos = self.stream.tell() - self.stream.seek(0, os.SEEK_END) - end_pos = self.stream.tell() - self.stream.seek(current_pos) - if current_pos != end_pos: - raise TransferInvalidError( - 'Upload complete with %s ' - 'additional bytes left in stream' % - (int(end_pos) - int(current_pos))) - return response - - def _send_media_request(self, request, end): - """Peform API upload request. - - Helper for _send_media_body & _send_chunk: - - :type request: :class:`google.cloud.streaming.http_wrapper.Request` - :param request: the request to upload - - :type end: int - :param end: end byte of the to be uploaded - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: the response - :raises: :exc:`~.streaming.exceptions.HttpError` if the status - code from the response indicates an error. - """ - response = make_api_request( - self.bytes_http, request, retries=self.num_retries) - if response.status_code not in (http_client.OK, http_client.CREATED, - RESUME_INCOMPLETE): - # We want to reset our state to wherever the server left us - # before this failed request, and then raise. - self.refresh_upload_state() - raise HttpError.from_response(response) - if response.status_code == RESUME_INCOMPLETE: - last_byte = self._last_byte( - self._get_range_header(response)) - if last_byte + 1 != end: - self.stream.seek(last_byte) - return response - - def _send_media_body(self, start): - """Send the entire stream in a single request. - - Helper for :meth:`stream_file`: - - :type start: int - :param start: start byte of the range. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response from the media upload request. - """ - self._ensure_initialized() - if self.total_size is None: - raise TransferInvalidError( - 'Total size must be known for SendMediaBody') - body_stream = StreamSlice(self.stream, self.total_size - start) - - request = Request(url=self.url, http_method='PUT', body=body_stream) - request.headers['Content-Type'] = self.mime_type - if start == self.total_size: - # End of an upload with 0 bytes left to send; just finalize. - range_string = 'bytes */%s' % self.total_size - else: - range_string = 'bytes %s-%s/%s' % (start, self.total_size - 1, - self.total_size) - - request.headers['Content-Range'] = range_string - - return self._send_media_request(request, self.total_size) - - def _send_chunk(self, start): - """Send a chunk of the stream. - - Helper for :meth:`stream_file`: - - :type start: int - :param start: start byte of the range. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response from the chunked upload request. 
- """ - self._ensure_initialized() - no_log_body = self.total_size is None - if self.total_size is None: - # For the streaming resumable case, we need to detect when - # we're at the end of the stream. - body_stream = BufferedStream( - self.stream, start, self.chunksize) - end = body_stream.stream_end_position - if body_stream.stream_exhausted: - self._total_size = end - # Here, change body_stream from a stream to a string object, - # which means reading a chunk into memory. This works around - # https://code.google.com/p/httplib2/issues/detail?id=176 which can - # cause httplib2 to skip bytes on 401's for file objects. - body_stream = body_stream.read(self.chunksize) - else: - end = min(start + self.chunksize, self.total_size) - body_stream = StreamSlice(self.stream, end - start) - request = Request(url=self.url, http_method='PUT', body=body_stream) - request.headers['Content-Type'] = self.mime_type - if no_log_body: - # Disable logging of streaming body. - request.loggable_body = '' - if self.total_size is None: - # Streaming resumable upload case, unknown total size. - range_string = 'bytes %s-%s/*' % (start, end - 1) - elif end == start: - # End of an upload with 0 bytes left to send; just finalize. - range_string = 'bytes */%s' % self.total_size - else: - # Normal resumable upload case with known sizes. - range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size) - - request.headers['Content-Range'] = range_string - - return self._send_media_request(request, end) diff --git a/core/google/cloud/streaming/util.py b/core/google/cloud/streaming/util.py deleted file mode 100644 index e896052f8a1c..000000000000 --- a/core/google/cloud/streaming/util.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Assorted utilities shared between parts of apitools.""" - -import random - - -_MAX_RETRY_WAIT = 60 - - -def calculate_wait_for_retry(retry_attempt): - """Calculate the amount of time to wait before a retry attempt. - - Wait time grows exponentially with the number of attempts. A - random amount of jitter is added to spread out retry attempts from - different clients. - - :type retry_attempt: int - :param retry_attempt: Retry attempt counter. - - :rtype: int - :returns: Number of seconds to wait before retrying request. - """ - wait_time = 2 ** retry_attempt - max_jitter = wait_time / 4.0 - wait_time += random.uniform(-max_jitter, max_jitter) - return max(1, min(wait_time, _MAX_RETRY_WAIT)) - - -def acceptable_mime_type(accept_patterns, mime_type): - """Check that ``mime_type`` matches one of ``accept_patterns``. - - Note that this function assumes that all patterns in accept_patterns - will be simple types of the form "type/subtype", where one or both - of these can be "*". We do not support parameters (i.e. "; q=") in - patterns. - - :type accept_patterns: list of string - :param accept_patterns: acceptable MIME types. 
- - :type mime_type: str - :param mime_type: the MIME being checked - - :rtype: bool - :returns: True if the supplied MIME type matches at least one of the - patterns, else False. - """ - if '/' not in mime_type: - raise ValueError( - 'Invalid MIME type: "%s"' % mime_type) - unsupported_patterns = [p for p in accept_patterns if ';' in p] - if unsupported_patterns: - raise ValueError( - 'MIME patterns with parameter unsupported: "%s"' % ', '.join( - unsupported_patterns)) - - def _match(pattern, mime_type): - """Return True iff mime_type is acceptable for pattern.""" - return all(accept in ('*', provided) for accept, provided - in zip(pattern.split('/'), mime_type.split('/'))) - - return any(_match(pattern, mime_type) for pattern in accept_patterns) diff --git a/core/nox.py b/core/nox.py index d941d60092b8..1dca10eb9b69 100644 --- a/core/nox.py +++ b/core/nox.py @@ -13,7 +13,6 @@ # limitations under the License. from __future__ import absolute_import - import os import nox @@ -27,37 +26,62 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', - 'grpcio >= 1.0.2') + session.install( + 'mock', + 'pytest', + 'pytest-cov', + 'grpcio >= 1.0.2', + ) session.install('-e', '.') # Run py.test against the unit tests. session.run( - 'py.test', '--quiet', - '--cov=google.cloud', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + 'py.test', + '--quiet', + '--cov=google.cloud', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8') + session.install( + 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') - session.run('flake8', 'google/cloud/core') + session.run('flake8', 'google', 'tests') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/core/pylint.config.py b/core/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/core/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/core/setup.py b/core/setup.py index 3dfa13ef5284..ba84f2347d18 100644 --- a/core/setup.py +++ b/core/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -57,11 +57,12 @@ 'google-auth >= 0.4.0, < 2.0.0dev', 'google-auth-httplib2', 'six', + 'tenacity >= 4.0.0, <5.0.0dev' ] setup( name='google-cloud-core', - version='0.24.1', + version='0.25.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ diff --git a/core/tests/unit/future/__init__.py b/core/tests/unit/future/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/core/tests/unit/future/test__helpers.py b/core/tests/unit/future/test__helpers.py new file mode 100644 index 000000000000..cbca5ba4d4df --- /dev/null +++ b/core/tests/unit/future/test__helpers.py @@ -0,0 +1,37 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from google.cloud.future import _helpers + + +@mock.patch('threading.Thread', autospec=True) +def test_start_daemon_thread(unused_thread): +    daemon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target) +    assert daemon_thread.daemon is True + + +def test_safe_invoke_callback(): +    callback = mock.Mock(spec=['__call__'], return_value=42) +    result = _helpers.safe_invoke_callback(callback, 'a', b='c') +    assert result == 42 +    callback.assert_called_once_with('a', b='c') + + +def test_safe_invoke_callback_exception(): +    callback = mock.Mock(spec=['__call__'], side_effect=ValueError()) +    result = _helpers.safe_invoke_callback(callback, 'a', b='c') +    assert result is None +    callback.assert_called_once_with('a', b='c') diff --git a/core/tests/unit/future/test_operation.py b/core/tests/unit/future/test_operation.py new file mode 100644 index 000000000000..2d281694001a --- /dev/null +++ b/core/tests/unit/future/test_operation.py @@ -0,0 +1,207 @@ +# Copyright 2017, Google Inc.
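The `_helpers` tests just above pin down two small behaviors: threads are started as daemons, and callback exceptions are swallowed with a `None` result. A sketch of helpers that would satisfy them (illustrative, not the shipped `google.cloud.future._helpers` source):

import threading

def start_daemon_thread(**kwargs):
    thread = threading.Thread(**kwargs)
    thread.daemon = True
    thread.start()
    return thread

def safe_invoke_callback(callback, *args, **kwargs):
    try:
        return callback(*args, **kwargs)
    except Exception:  # pylint: disable=broad-except
        return None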
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import mock + +from google.cloud.future import operation +from google.longrunning import operations_pb2 +from google.protobuf import struct_pb2 +from google.rpc import code_pb2 +from google.rpc import status_pb2 + +TEST_OPERATION_NAME = 'test/operation' + + +def make_operation_proto( + name=TEST_OPERATION_NAME, metadata=None, response=None, + error=None, **kwargs): + operation_proto = operations_pb2.Operation( + name=name, **kwargs) + + if metadata is not None: + operation_proto.metadata.Pack(metadata) + + if response is not None: + operation_proto.response.Pack(response) + + if error is not None: + operation_proto.error.CopyFrom(error) + + return operation_proto + + +def make_operation_future(client_operations_responses=None): + if client_operations_responses is None: + client_operations_responses = [make_operation_proto()] + + refresh = mock.Mock( + spec=['__call__'], side_effect=client_operations_responses) + refresh.responses = client_operations_responses + cancel = mock.Mock(spec=['__call__']) + operation_future = operation.Operation( + client_operations_responses[0], + refresh, + cancel, + result_type=struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + return operation_future, refresh, cancel + + +def test_constructor(): + future, refresh, _ = make_operation_future() + + assert future.operation == refresh.responses[0] + assert future.operation.done is False + assert future.operation.name == TEST_OPERATION_NAME + assert future.metadata is None + assert future.running() + + +def test_metadata(): + expected_metadata = struct_pb2.Struct() + future, _, _ = make_operation_future( + [make_operation_proto(metadata=expected_metadata)]) + + assert future.metadata == expected_metadata + + +def test_cancellation(): + responses = [ + make_operation_proto(), + # Second response indicates that the operation was cancelled. + make_operation_proto( + done=True, + error=status_pb2.Status(code=code_pb2.CANCELLED))] + future, _, cancel = make_operation_future(responses) + + assert future.cancel() + assert future.cancelled() + cancel.assert_called_once_with() + + # Cancelling twice should have no effect. + assert not future.cancel() + cancel.assert_called_once_with() + + +def test_result(): + expected_result = struct_pb2.Struct() + responses = [ + make_operation_proto(), + # Second operation response includes the result. + make_operation_proto(done=True, response=expected_result)] + future, _, _ = make_operation_future(responses) + + result = future.result() + + assert result == expected_result + assert future.done() + + +def test_exception(): + expected_exception = status_pb2.Status(message='meep') + responses = [ + make_operation_proto(), + # Second operation response includes the error. 
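The operation test helpers above rely on protobuf `Any` packing to carry typed metadata and results inside `operations_pb2.Operation`. The round trip in isolation:

from google.longrunning import operations_pb2
from google.protobuf import struct_pb2

response = struct_pb2.Struct()
response['note'] = 'done'

op = operations_pb2.Operation(name='test/operation', done=True)
op.response.Pack(response)      # stores type URL + serialized payload

unpacked = struct_pb2.Struct()
assert op.response.Unpack(unpacked)
assert unpacked['note'] == 'done'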
+ make_operation_proto(done=True, error=expected_exception)] + future, _, _ = make_operation_future(responses) + + exception = future.exception() + + assert expected_exception.message in '{!r}'.format(exception) + + +def test_unexpected_result(): + responses = [ + make_operation_proto(), + # Second operation response is done, but has no error or response. + make_operation_proto(done=True)] + future, _, _ = make_operation_future(responses) + + exception = future.exception() + + assert 'Unexpected state' in '{!r}'.format(exception) + + +def test__refresh_http(): + api_request = mock.Mock( + return_value={'name': TEST_OPERATION_NAME, 'done': True}) + + result = operation._refresh_http(api_request, TEST_OPERATION_NAME) + + assert result.name == TEST_OPERATION_NAME + assert result.done is True + api_request.assert_called_once_with( + method='GET', path='operations/{}'.format(TEST_OPERATION_NAME)) + + +def test__cancel_http(): + api_request = mock.Mock() + + operation._cancel_http(api_request, TEST_OPERATION_NAME) + + api_request.assert_called_once_with( + method='POST', path='operations/{}:cancel'.format(TEST_OPERATION_NAME)) + + +def test_from_http_json(): + operation_json = {'name': TEST_OPERATION_NAME, 'done': True} + api_request = mock.sentinel.api_request + + future = operation.from_http_json( + operation_json, api_request, struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + assert future._result_type == struct_pb2.Struct + assert future._metadata_type == struct_pb2.Struct + assert future.operation.name == TEST_OPERATION_NAME + assert future.done + + +def test__refresh_grpc(): + operations_stub = mock.Mock(spec=['GetOperation']) + expected_result = make_operation_proto(done=True) + operations_stub.GetOperation.return_value = expected_result + + result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME) + + assert result == expected_result + expected_request = operations_pb2.GetOperationRequest( + name=TEST_OPERATION_NAME) + operations_stub.GetOperation.assert_called_once_with(expected_request) + + +def test__cancel_grpc(): + operations_stub = mock.Mock(spec=['CancelOperation']) + + operation._cancel_grpc(operations_stub, TEST_OPERATION_NAME) + + expected_request = operations_pb2.CancelOperationRequest( + name=TEST_OPERATION_NAME) + operations_stub.CancelOperation.assert_called_once_with(expected_request) + + +def test_from_grpc(): + operation_proto = make_operation_proto(done=True) + operations_stub = mock.sentinel.operations_stub + + future = operation.from_grpc( + operation_proto, operations_stub, struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + assert future._result_type == struct_pb2.Struct + assert future._metadata_type == struct_pb2.Struct + assert future.operation.name == TEST_OPERATION_NAME + assert future.done diff --git a/core/tests/unit/future/test_polling.py b/core/tests/unit/future/test_polling.py new file mode 100644 index 000000000000..c8fde1c20385 --- /dev/null +++ b/core/tests/unit/future/test_polling.py @@ -0,0 +1,157 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
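The refresh callable in `make_operation_future` above uses `mock.Mock(side_effect=[...])` so each poll returns the next canned proto: the first call yields a pending operation, the second a finished one. The same device in miniature:

import mock

refresh = mock.Mock(spec=['__call__'], side_effect=['pending', 'done'])
assert refresh() == 'pending'
assert refresh() == 'done'     # next call returns the next canned response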
diff --git a/core/tests/unit/future/test_polling.py b/core/tests/unit/future/test_polling.py
new file mode 100644
index 000000000000..c8fde1c20385
--- /dev/null
+++ b/core/tests/unit/future/test_polling.py
@@ -0,0 +1,157 @@
+# Copyright 2017, Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import concurrent.futures
+import threading
+import time
+
+import mock
+import pytest
+
+from google.cloud.future import polling
+
+
+class PollingFutureImpl(polling.PollingFuture):
+    def done(self):
+        return False
+
+    def cancel(self):
+        return True
+
+    def cancelled(self):
+        return False
+
+    def running(self):
+        return True
+
+
+def test_polling_future_constructor():
+    future = PollingFutureImpl()
+    assert not future.done()
+    assert not future.cancelled()
+    assert future.running()
+    assert future.cancel()
+
+
+def test_set_result():
+    future = PollingFutureImpl()
+    callback = mock.Mock()
+
+    future.set_result(1)
+
+    assert future.result() == 1
+    future.add_done_callback(callback)
+    callback.assert_called_once_with(future)
+
+
+def test_set_exception():
+    future = PollingFutureImpl()
+    exception = ValueError('meep')
+
+    future.set_exception(exception)
+
+    assert future.exception() == exception
+    with pytest.raises(ValueError):
+        future.result()
+
+    callback = mock.Mock()
+    future.add_done_callback(callback)
+    callback.assert_called_once_with(future)
+
+
+def test_invoke_callback_exception():
+    future = PollingFutureImplWithPoll()
+    future.set_result(42)
+
+    # This should not raise, despite the callback causing an exception.
+    callback = mock.Mock(side_effect=ValueError)
+    future.add_done_callback(callback)
+    callback.assert_called_once_with(future)
+
+
+class PollingFutureImplWithPoll(PollingFutureImpl):
+    def __init__(self):
+        super(PollingFutureImplWithPoll, self).__init__()
+        self.poll_count = 0
+        self.event = threading.Event()
+
+    def done(self):
+        self.poll_count += 1
+        self.event.wait()
+        self.set_result(42)
+        return True
+
+
+def test_result_with_polling():
+    future = PollingFutureImplWithPoll()
+
+    future.event.set()
+    result = future.result()
+
+    assert result == 42
+    assert future.poll_count == 1
+    # Repeated calls should not cause additional polling.
+    assert future.result() == result
+    assert future.poll_count == 1
+
+
+class PollingFutureImplTimeout(PollingFutureImplWithPoll):
+    def done(self):
+        time.sleep(1)
+        return False
+
+
+def test_result_timeout():
+    future = PollingFutureImplTimeout()
+    with pytest.raises(concurrent.futures.TimeoutError):
+        future.result(timeout=1)
+
+
+def test_callback_background_thread():
+    future = PollingFutureImplWithPoll()
+    callback = mock.Mock()
+
+    future.add_done_callback(callback)
+
+    assert future._polling_thread is not None
+
+    # Give the thread a second to poll.
+    time.sleep(1)
+    assert future.poll_count == 1
+
+    future.event.set()
+    future._polling_thread.join()
+
+    callback.assert_called_once_with(future)
+
+
+def test_double_callback_background_thread():
+    future = PollingFutureImplWithPoll()
+    callback = mock.Mock()
+    callback2 = mock.Mock()
+
+    future.add_done_callback(callback)
+    current_thread = future._polling_thread
+    assert current_thread is not None
+
+    # Only one polling thread should be created.
+    future.add_done_callback(callback2)
+    assert future._polling_thread is current_thread
+
+    future.event.set()
+    future._polling_thread.join()
+
+    assert future.poll_count == 1
+    callback.assert_called_once_with(future)
+    callback2.assert_called_once_with(future)
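Taken together, these tests pin down the subclassing contract: a concrete future implements done() (plus cancel(), cancelled(), and running()), while the PollingFuture base class supplies result(), exception(), timeout handling, and done-callback dispatch, creating at most one background polling thread. A minimal editorial sketch of a conforming subclass follows; the three-poll countdown is invented for illustration and is not part of this patch.

    from google.cloud.future import polling

    class CountdownFuture(polling.PollingFuture):
        """Reports done after a fixed number of polls (illustrative)."""

        def __init__(self, polls_needed=3):
            super(CountdownFuture, self).__init__()
            self._polls_left = polls_needed

        def done(self):
            # Each call to done() is one poll; finish on the last one.
            self._polls_left -= 1
            if self._polls_left <= 0:
                self.set_result('finished')
                return True
            return False

        def cancel(self):
            return False

        def cancelled(self):
            return False

        def running(self):
            return True

    def announce(fut):
        # Runs immediately here, because the future is already complete
        # by the time the callback is added.
        pass

    future = CountdownFuture()
    assert future.result() == 'finished'  # blocks, polling done()
    future.add_done_callback(announce)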
diff --git a/core/tests/unit/streaming/test_buffered_stream.py b/core/tests/unit/streaming/test_buffered_stream.py deleted file mode 100644 index 797ceea2d280..000000000000 --- a/core/tests/unit/streaming/test_buffered_stream.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test_BufferedStream(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.buffered_stream import BufferedStream - - return BufferedStream - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_closed_stream(self): - class _Stream(object): - closed = True - - start = 0 - bufsize = 4 - bufstream = self._make_one(_Stream, start, bufsize) - self.assertIs(bufstream._stream, _Stream) - self.assertEqual(bufstream._start_pos, start) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, b'') - self.assertTrue(bufstream._stream_at_end) - self.assertEqual(bufstream._end_pos, 0) - - def test_ctor_start_zero_longer_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertIs(bufstream._stream, stream) - self.assertEqual(bufstream._start_pos, START) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, CONTENT[:BUFSIZE]) - self.assertEqual(len(bufstream), BUFSIZE) - self.assertFalse(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, BUFSIZE) - - def test_ctor_start_nonzero_shorter_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 8 - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertIs(bufstream._stream, stream) - self.assertEqual(bufstream._start_pos, START) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, CONTENT[START:]) - self.assertEqual(len(bufstream), len(CONTENT) - START) - self.assertTrue(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, len(CONTENT)) - - def test__bytes_remaining_start_zero_longer_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream._bytes_remaining, BUFSIZE) - - def test__bytes_remaining_start_zero_shorter_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 8 - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed
- bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream._bytes_remaining, len(CONTENT) - START) - - def test_read_w_none(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - with self.assertRaises(ValueError): - bufstream.read(None) - - def test_read_w_negative_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - with self.assertRaises(ValueError): - bufstream.read(-2) - - def test_read_from_start(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream.read(4), CONTENT[:4]) - - def test_read_exhausted(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = len(CONTENT) - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertTrue(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, len(CONTENT)) - self.assertEqual(bufstream._bytes_remaining, 0) - self.assertEqual(bufstream.read(10), b'') diff --git a/core/tests/unit/streaming/test_exceptions.py b/core/tests/unit/streaming/test_exceptions.py deleted file mode 100644 index b31c562c8e9d..000000000000 --- a/core/tests/unit/streaming/test_exceptions.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_HttpError(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.exceptions import HttpError - - return HttpError - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - exception = self._make_one(RESPONSE, CONTENT, URL) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - self.assertEqual(exception.status_code, 404) - self.assertEqual( - str(exception), - "HttpError accessing : " - "response: <{'status': '404'}>, content ") - - def test_from_response(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - - class _Response(object): - info = RESPONSE - content = CONTENT - request_url = URL - - klass = self._get_target_class() - exception = klass.from_response(_Response()) - self.assertIsInstance(exception, klass) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - - -class Test_RetryAfterError(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.exceptions import RetryAfterError - - return RetryAfterError - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - RETRY_AFTER = 60 - exception = self._make_one(RESPONSE, CONTENT, URL, RETRY_AFTER) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - self.assertEqual(exception.retry_after, RETRY_AFTER) - self.assertEqual( - str(exception), - "HttpError accessing : " - "response: <{'status': '404'}>, content ") - - def test_from_response(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - RETRY_AFTER = 60 - - class _Response(object): - info = RESPONSE - content = CONTENT - request_url = URL - retry_after = RETRY_AFTER - - klass = self._get_target_class() - exception = klass.from_response(_Response()) - self.assertIsInstance(exception, klass) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - self.assertEqual(exception.retry_after, RETRY_AFTER) diff --git a/core/tests/unit/streaming/test_http_wrapper.py b/core/tests/unit/streaming/test_http_wrapper.py deleted file mode 100644 index b0d3156ba42f..000000000000 --- a/core/tests/unit/streaming/test_http_wrapper.py +++ /dev/null @@ -1,498 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test__httplib2_debug_level(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.http_wrapper import _httplib2_debug_level - - return _httplib2_debug_level - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_wo_loggable_body_wo_http(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - request = _Request() - LEVEL = 1 - _httplib2 = _Dummy(debuglevel=0) - with _Monkey(MUT, httplib2=_httplib2): - with self._make_one(request, LEVEL): - self.assertEqual(_httplib2.debuglevel, 0) - - def test_w_loggable_body_wo_http(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - request = _Request(loggable_body=object()) - LEVEL = 1 - _httplib2 = _Dummy(debuglevel=0) - with _Monkey(MUT, httplib2=_httplib2): - with self._make_one(request, LEVEL): - self.assertEqual(_httplib2.debuglevel, LEVEL) - self.assertEqual(_httplib2.debuglevel, 0) - - def test_w_loggable_body_w_http(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - class _Connection(object): - debuglevel = 0 - - def set_debuglevel(self, value): - self.debuglevel = value - - request = _Request(loggable_body=object()) - LEVEL = 1 - _httplib2 = _Dummy(debuglevel=0) - update_me = _Connection() - skip_me = _Connection() - connections = {'update:me': update_me, 'skip_me': skip_me} - _http = _Dummy(connections=connections) - with _Monkey(MUT, httplib2=_httplib2): - with self._make_one(request, LEVEL, _http): - self.assertEqual(_httplib2.debuglevel, LEVEL) - self.assertEqual(update_me.debuglevel, LEVEL) - self.assertEqual(skip_me.debuglevel, 0) - self.assertEqual(_httplib2.debuglevel, 0) - self.assertEqual(update_me.debuglevel, 0) - self.assertEqual(skip_me.debuglevel, 0) - - -class Test_Request(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.http_wrapper import Request - - return Request - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - request = self._make_one() - self.assertEqual(request.url, '') - self.assertEqual(request.http_method, 'GET') - self.assertEqual(request.headers, {'content-length': '0'}) - self.assertEqual(request.body, '') - self.assertIsNone(request.loggable_body) - - def test_loggable_body_setter_w_body_None(self): - from google.cloud.streaming.exceptions import RequestError - - request = self._make_one(body=None) - with self.assertRaises(RequestError): - request.loggable_body = 'abc' - - def test_body_setter_w_None(self): - request = self._make_one() - request.loggable_body = 'abc' - request.body = None - self.assertEqual(request.headers, {}) - self.assertIsNone(request.body) - self.assertEqual(request.loggable_body, 'abc') - - def test_body_setter_w_non_string(self): - request = self._make_one() - request.loggable_body = 'abc' - request.body = body = _Dummy(length=123) - self.assertEqual(request.headers, {'content-length': '123'}) - self.assertIs(request.body, body) - self.assertEqual(request.loggable_body, '') - - -class Test_Response(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.http_wrapper import Response - - return Response - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - CONTENT = 'CONTENT' - URL = 
'http://example.com/api' - info = {'status': '200'} - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), len(CONTENT)) - self.assertEqual(response.status_code, 200) - self.assertIsNone(response.retry_after) - self.assertFalse(response.is_redirect) - - def test_length_w_content_encoding_w_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - RANGE = 'bytes 0-122/5678' - info = { - 'status': '200', - 'content-length': len(CONTENT), - 'content-encoding': 'testing', - 'content-range': RANGE, - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), 123) - - def test_length_w_content_encoding_wo_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '200', - 'content-length': len(CONTENT), - 'content-encoding': 'testing', - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), len(CONTENT)) - - def test_length_w_content_length_w_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - RANGE = 'bytes 0-12/5678' - info = { - 'status': '200', - 'content-length': len(CONTENT) * 2, - 'content-range': RANGE, - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), len(CONTENT) * 2) - - def test_length_wo_content_length_w_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - RANGE = 'bytes 0-122/5678' - info = { - 'status': '200', - 'content-range': RANGE, - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), 123) - - def test_retry_after_w_header(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '200', - 'retry-after': '123', - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(response.retry_after, 123) - - def test_is_redirect_w_code_wo_location(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '301', - } - response = self._make_one(info, CONTENT, URL) - self.assertFalse(response.is_redirect) - - def test_is_redirect_w_code_w_location(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '301', - 'location': 'http://example.com/other', - } - response = self._make_one(info, CONTENT, URL) - self.assertTrue(response.is_redirect) - - -class Test__check_response(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import _check_response - - return _check_response(*args, **kw) - - def test_w_none(self): - from google.cloud.streaming.exceptions import RequestError - - with self.assertRaises(RequestError): - self._call_fut(None) - - def test_w_TOO_MANY_REQUESTS(self): - from google.cloud.streaming.exceptions import BadStatusCodeError - from google.cloud.streaming.http_wrapper import TOO_MANY_REQUESTS - - with self.assertRaises(BadStatusCodeError): - self._call_fut(_Response(TOO_MANY_REQUESTS)) - - def test_w_50x(self): - from google.cloud.streaming.exceptions import BadStatusCodeError - - with self.assertRaises(BadStatusCodeError): - self._call_fut(_Response(500)) - - with self.assertRaises(BadStatusCodeError): - self._call_fut(_Response(503)) - - def test_w_retry_after(self): - from google.cloud.streaming.exceptions import RetryAfterError - - with self.assertRaises(RetryAfterError): - self._call_fut(_Response(200, 20)) - - def test_pass(self): - self._call_fut(_Response(200)) - - -class Test__reset_http_connections(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from 
google.cloud.streaming.http_wrapper import _reset_http_connections - - return _reset_http_connections(*args, **kw) - - def test_wo_connections(self): - http = object() - self._call_fut(http) - - def test_w_connections(self): - connections = {'delete:me': object(), 'skip_me': object()} - http = _Dummy(connections=connections) - self._call_fut(http) - self.assertFalse('delete:me' in connections) - self.assertTrue('skip_me' in connections) - - -class Test___make_api_request_no_retry(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import ( - _make_api_request_no_retry) - return _make_api_request_no_retry(*args, **kw) - - def _verify_requested(self, http, request, - redirections=5, connection_type=None): - self.assertEqual(len(http._requested), 1) - url, kw = http._requested[0] - self.assertEqual(url, request.url) - self.assertEqual(kw['method'], request.http_method) - self.assertEqual(kw['body'], request.body) - self.assertEqual(kw['headers'], request.headers) - self.assertEqual(kw['redirections'], redirections) - self.assertEqual(kw['connection_type'], connection_type) - - def test_defaults_wo_connections(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - INFO = {'status': '200'} - CONTENT = 'CONTENT' - _http = _Http((INFO, CONTENT)) - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - _checked = [] - with _Monkey(MUT, httplib2=_httplib2, - _check_response=_checked.append): - response = self._call_fut(_http, _request) - - self.assertIsInstance(response, MUT.Response) - self.assertEqual(response.info, INFO) - self.assertEqual(response.content, CONTENT) - self.assertEqual(response.request_url, _request.url) - self.assertEqual(_checked, [response]) - self._verify_requested(_http, _request) - - def test_w_http_connections_miss(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - INFO = {'status': '200'} - CONTENT = 'CONTENT' - CONN_TYPE = object() - _http = _Http((INFO, CONTENT)) - _http.connections = {'https': CONN_TYPE} - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - _checked = [] - with _Monkey(MUT, httplib2=_httplib2, - _check_response=_checked.append): - response = self._call_fut(_http, _request) - - self.assertIsInstance(response, MUT.Response) - self.assertEqual(response.info, INFO) - self.assertEqual(response.content, CONTENT) - self.assertEqual(response.request_url, _request.url) - self.assertEqual(_checked, [response]) - self._verify_requested(_http, _request) - - def test_w_http_connections_hit(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - INFO = {'status': '200'} - CONTENT = 'CONTENT' - CONN_TYPE = object() - _http = _Http((INFO, CONTENT)) - _http.connections = {'http': CONN_TYPE} - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - _checked = [] - with _Monkey(MUT, httplib2=_httplib2, - _check_response=_checked.append): - response = self._call_fut(_http, _request) - - self.assertIsInstance(response, MUT.Response) - self.assertEqual(response.info, INFO) - self.assertEqual(response.content, CONTENT) - self.assertEqual(response.request_url, _request.url) - self.assertEqual(_checked, [response]) - self._verify_requested(_http, _request, connection_type=CONN_TYPE) - - def test_w_request_returning_None(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - from 
google.cloud.streaming.exceptions import RequestError - - INFO = None - CONTENT = None - CONN_TYPE = object() - _http = _Http((INFO, CONTENT)) - _http.connections = {'http': CONN_TYPE} - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - with _Monkey(MUT, httplib2=_httplib2): - with self.assertRaises(RequestError): - self._call_fut(_http, _request) - self._verify_requested(_http, _request, connection_type=CONN_TYPE) - - -class Test_make_api_request(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import make_api_request - - return make_api_request(*args, **kw) - - def test_wo_exception(self): - from google.cloud.streaming import http_wrapper as MUT - from google.cloud._testing import _Monkey - - HTTP, REQUEST, RESPONSE = object(), object(), object() - _created, _checked = [], [] - - def _wo_exception(*args, **kw): - _created.append((args, kw)) - return RESPONSE - - with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, - _check_response=_checked.append): - response = self._call_fut(HTTP, REQUEST) - - self.assertIs(response, RESPONSE) - expected_kw = {'redirections': MUT._REDIRECTIONS} - self.assertEqual(_created, [((HTTP, REQUEST), expected_kw)]) - self.assertEqual(_checked, []) # not called by '_wo_exception' - - def test_w_exceptions_lt_max_retries(self): - from google.cloud.streaming.exceptions import RetryAfterError - from google.cloud.streaming import http_wrapper as MUT - from google.cloud._testing import _Monkey - - HTTP, RESPONSE = object(), object() - REQUEST = _Request() - _created, _checked = [], [] - _counter = [None] * 4 - - def _wo_exception(*args, **kw): - _created.append((args, kw)) - if _counter: - _counter.pop() - raise RetryAfterError(RESPONSE, '', REQUEST.url, 0.1) - return RESPONSE - - with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, - _check_response=_checked.append): - response = self._call_fut(HTTP, REQUEST, retries=5) - - self.assertIs(response, RESPONSE) - self.assertEqual(len(_created), 5) - expected_kw = {'redirections': MUT._REDIRECTIONS} - for attempt in _created: - self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw)) - self.assertEqual(_checked, []) # not called by '_wo_exception' - - def test_w_exceptions_gt_max_retries(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - HTTP = object() - REQUEST = _Request() - _created, _checked = [], [] - - def _wo_exception(*args, **kw): - _created.append((args, kw)) - raise ValueError('Retryable') - - with _Monkey(MUT, calculate_wait_for_retry=lambda *ignored: 0.1, - _make_api_request_no_retry=_wo_exception, - _check_response=_checked.append): - with self.assertRaises(ValueError): - self._call_fut(HTTP, REQUEST, retries=3) - - self.assertEqual(len(_created), 3) - expected_kw = {'redirections': MUT._REDIRECTIONS} - for attempt in _created: - self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw)) - self.assertEqual(_checked, []) # not called by '_wo_exception' - - -class _Dummy(object): - def __init__(self, **kw): - self.__dict__.update(kw) - - -class _Request(object): - __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body',) - URL = 'http://example.com/api' - - def __init__(self, url=URL, http_method='GET', body='', - loggable_body=None): - self.url = url - self.http_method = http_method - self.body = body - self.headers = {} - self.loggable_body = loggable_body - - -class _Response(object): - content = '' - request_url = _Request.URL - - def __init__(self, 
status_code, retry_after=None): - self.info = {'status': status_code} - self.status_code = status_code - self.retry_after = retry_after - - -class _Http(object): - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - - def request(self, url, **kw): - self._requested.append((url, kw)) - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/core/tests/unit/streaming/test_stream_slice.py b/core/tests/unit/streaming/test_stream_slice.py deleted file mode 100644 index 47820078447d..000000000000 --- a/core/tests/unit/streaming/test_stream_slice.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test_StreamSlice(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.stream_slice import StreamSlice - - return StreamSlice - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertIs(stream_slice._stream, stream) - self.assertEqual(stream_slice._remaining_bytes, MAXSIZE) - self.assertEqual(stream_slice._max_bytes, MAXSIZE) - self.assertEqual(len(stream_slice), MAXSIZE) - self.assertEqual(stream_slice.length, MAXSIZE) - - def test___nonzero___empty(self): - from io import BytesIO - - CONTENT = b'' - MAXSIZE = 0 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertFalse(stream_slice) - - def test___nonzero___nonempty(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertTrue(stream_slice) - - def test_read_exhausted(self): - from io import BytesIO - from six.moves import http_client - - CONTENT = b'' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - with self.assertRaises(http_client.IncompleteRead): - stream_slice.read() - - def test_read_implicit_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertEqual(stream_slice.read(), CONTENT[:MAXSIZE]) - self.assertEqual(stream_slice._remaining_bytes, 0) - - def test_read_explicit_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - SIZE = 3 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertEqual(stream_slice.read(SIZE), CONTENT[:SIZE]) - self.assertEqual(stream_slice._remaining_bytes, MAXSIZE - SIZE) diff --git a/core/tests/unit/streaming/test_transfer.py b/core/tests/unit/streaming/test_transfer.py deleted file mode 100644 index 8bafd4a1cc47..000000000000 --- a/core/tests/unit/streaming/test_transfer.py +++ /dev/null @@ -1,2035 +0,0 @@ -# Copyright 2016 
Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test__Transfer(unittest.TestCase): - URL = 'http://example.com/api' - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.transfer import _Transfer - - return _Transfer - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE - - stream = _Stream() - xfer = self._make_one(stream) - self.assertIs(xfer.stream, stream) - self.assertFalse(xfer.close_stream) - self.assertEqual(xfer.chunksize, _DEFAULT_CHUNKSIZE) - self.assertTrue(xfer.auto_transfer) - self.assertIsNone(xfer.bytes_http) - self.assertIsNone(xfer.http) - self.assertEqual(xfer.num_retries, 5) - self.assertIsNone(xfer.url) - self.assertFalse(xfer.initialized) - - def test_ctor_explicit(self): - stream = _Stream() - HTTP = object() - CHUNK_SIZE = 1 << 18 - NUM_RETRIES = 8 - xfer = self._make_one(stream, - close_stream=True, - chunksize=CHUNK_SIZE, - auto_transfer=False, - http=HTTP, - num_retries=NUM_RETRIES) - self.assertIs(xfer.stream, stream) - self.assertTrue(xfer.close_stream) - self.assertEqual(xfer.chunksize, CHUNK_SIZE) - self.assertFalse(xfer.auto_transfer) - self.assertIs(xfer.bytes_http, HTTP) - self.assertIs(xfer.http, HTTP) - self.assertEqual(xfer.num_retries, NUM_RETRIES) - - def test_bytes_http_fallback_to_http(self): - stream = _Stream() - HTTP = object() - xfer = self._make_one(stream, http=HTTP) - self.assertIs(xfer.bytes_http, HTTP) - - def test_bytes_http_setter(self): - stream = _Stream() - HTTP = object() - BYTES_HTTP = object() - xfer = self._make_one(stream, http=HTTP) - xfer.bytes_http = BYTES_HTTP - self.assertIs(xfer.bytes_http, BYTES_HTTP) - - def test_num_retries_setter_invalid(self): - stream = _Stream() - xfer = self._make_one(stream) - with self.assertRaises(ValueError): - xfer.num_retries = object() - - def test_num_retries_setter_negative(self): - stream = _Stream() - xfer = self._make_one(stream) - with self.assertRaises(ValueError): - xfer.num_retries = -1 - - def test__initialize_not_already_initialized_w_http(self): - HTTP = object() - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(HTTP, self.URL) - self.assertTrue(xfer.initialized) - self.assertIs(xfer.http, HTTP) - self.assertIs(xfer.url, self.URL) - - def test__initialize_not_already_initialized_wo_http(self): - from httplib2 import Http - - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(None, self.URL) - self.assertTrue(xfer.initialized) - self.assertIsInstance(xfer.http, Http) - self.assertIs(xfer.url, self.URL) - - def test__initialize_w_existing_http(self): - HTTP_1, HTTP_2 = object(), object() - stream = _Stream() - xfer = self._make_one(stream, http=HTTP_1) - xfer._initialize(HTTP_2, self.URL) - self.assertTrue(xfer.initialized) - self.assertIs(xfer.http, HTTP_1) - self.assertIs(xfer.url, self.URL) - - def test__initialize_already_initialized(self): - from 
google.cloud.streaming.exceptions import TransferInvalidError - - URL_2 = 'http://example.com/other' - HTTP_1, HTTP_2 = object(), object() - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(HTTP_1, self.URL) - with self.assertRaises(TransferInvalidError): - xfer._initialize(HTTP_2, URL_2) - - def test__ensure_initialized_hit(self): - HTTP = object() - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(HTTP, self.URL) - xfer._ensure_initialized() # no raise - - def test__ensure_initialized_miss(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - stream = _Stream() - xfer = self._make_one(stream) - with self.assertRaises(TransferInvalidError): - xfer._ensure_initialized() - - def test__ensure_uninitialized_hit(self): - stream = _Stream() - xfer = self._make_one(stream) - xfer._ensure_uninitialized() # no raise - - def test__ensure_uninitialized_miss(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - stream = _Stream() - HTTP = object() - xfer = self._make_one(stream) - xfer._initialize(HTTP, self.URL) - with self.assertRaises(TransferInvalidError): - xfer._ensure_uninitialized() - - def test___del___closes_stream(self): - - stream = _Stream() - xfer = self._make_one(stream, close_stream=True) - - self.assertFalse(stream._closed) - del xfer - self.assertTrue(stream._closed) - - -class Test_Download(unittest.TestCase): - URL = "http://example.com/api" - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.transfer import Download - - return Download - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - stream = _Stream() - download = self._make_one(stream) - self.assertIs(download.stream, stream) - self.assertIsNone(download._initial_response) - self.assertEqual(download.progress, 0) - self.assertIsNone(download.total_size) - self.assertIsNone(download.encoding) - - def test_ctor_w_kwds(self): - stream = _Stream() - CHUNK_SIZE = 123 - download = self._make_one(stream, chunksize=CHUNK_SIZE) - self.assertIs(download.stream, stream) - self.assertEqual(download.chunksize, CHUNK_SIZE) - - def test_ctor_w_total_size(self): - stream = _Stream() - SIZE = 123 - download = self._make_one(stream, total_size=SIZE) - self.assertIs(download.stream, stream) - self.assertEqual(download.total_size, SIZE) - - def test_from_file_w_existing_file_no_override(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.out') - with open(filename, 'w') as fileobj: - fileobj.write('EXISTING FILE') - with self.assertRaises(ValueError): - klass.from_file(filename) - - def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.out') - with open(filename, 'w') as fileobj: - fileobj.write('EXISTING FILE') - download = klass.from_file(filename, overwrite=True, - auto_transfer=False) - self.assertFalse(download.auto_transfer) - del download # closes stream - with open(filename, 'rb') as fileobj: - self.assertEqual(fileobj.read(), b'') - - def test_from_stream_defaults(self): - stream = _Stream() - klass = self._get_target_class() - download = klass.from_stream(stream) - self.assertIs(download.stream, stream) - self.assertTrue(download.auto_transfer) - 
self.assertIsNone(download.total_size) - - def test_from_stream_explicit(self): - CHUNK_SIZE = 1 << 18 - SIZE = 123 - stream = _Stream() - klass = self._get_target_class() - download = klass.from_stream(stream, auto_transfer=False, - total_size=SIZE, chunksize=CHUNK_SIZE) - self.assertIs(download.stream, stream) - self.assertFalse(download.auto_transfer) - self.assertEqual(download.total_size, SIZE) - self.assertEqual(download.chunksize, CHUNK_SIZE) - - def test_configure_request(self): - CHUNK_SIZE = 100 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - request = _Dummy(headers={}) - url_builder = _Dummy(query_params={}) - download.configure_request(request, url_builder) - self.assertEqual(request.headers, {'Range': 'bytes=0-99'}) - self.assertEqual(url_builder.query_params, {'alt': 'media'}) - - def test__set_total_wo_content_range_wo_existing_total(self): - info = {} - download = self._make_one(_Stream()) - download._set_total(info) - self.assertEqual(download.total_size, 0) - - def test__set_total_wo_content_range_w_existing_total(self): - SIZE = 123 - info = {} - download = self._make_one(_Stream(), total_size=SIZE) - download._set_total(info) - self.assertEqual(download.total_size, SIZE) - - def test__set_total_w_content_range_w_existing_total(self): - SIZE = 123 - info = {'content-range': 'bytes 123-234/4567'} - download = self._make_one(_Stream(), total_size=SIZE) - download._set_total(info) - self.assertEqual(download.total_size, 4567) - - def test__set_total_w_content_range_w_asterisk_total(self): - info = {'content-range': 'bytes 123-234/*'} - download = self._make_one(_Stream()) - download._set_total(info) - self.assertEqual(download.total_size, 0) - - def test_initialize_download_already_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - request = _Request() - download = self._make_one(_Stream()) - download._initialize(None, self.URL) - with self.assertRaises(TransferInvalidError): - download.initialize_download(request, http=object()) - - def test_initialize_download_wo_autotransfer(self): - request = _Request() - http = object() - download = self._make_one(_Stream(), auto_transfer=False) - download.initialize_download(request, http) - self.assertIs(download.http, http) - self.assertEqual(download.url, request.url) - - def test_initialize_download_w_autotransfer_failing(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - - request = _Request() - http = object() - download = self._make_one(_Stream(), auto_transfer=True) - - response = _makeResponse(http_client.BAD_REQUEST) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - with self.assertRaises(HttpError): - download.initialize_download(request, http) - - self.assertTrue(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test_initialize_download_w_autotransfer_w_content_location(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - REDIRECT_URL = 'http://example.com/other' - request = _Request() - http = object() - info = {'content-location': REDIRECT_URL} - download = self._make_one(_Stream(), auto_transfer=True) - - response = _makeResponse(http_client.NO_CONTENT, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - 
download.initialize_download(request, http) - - self.assertIsNone(download._initial_response) - self.assertEqual(download.total_size, 0) - self.assertIs(download.http, http) - self.assertEqual(download.url, REDIRECT_URL) - self.assertTrue(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test__normalize_start_end_w_end_w_start_lt_0(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - - with self.assertRaises(TransferInvalidError): - download._normalize_start_end(-1, 0) - - def test__normalize_start_end_w_end_w_start_gt_total(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - - with self.assertRaises(TransferInvalidError): - download._normalize_start_end(3, 0) - - def test__normalize_start_end_w_end_lt_start(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - - with self.assertRaises(TransferInvalidError): - download._normalize_start_end(1, 0) - - def test__normalize_start_end_w_end_gt_start(self): - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - self.assertEqual(download._normalize_start_end(1, 2), (1, 1)) - - def test__normalize_start_end_wo_end_w_start_lt_0(self): - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - self.assertEqual(download._normalize_start_end(-2), (0, 1)) - self.assertEqual(download._normalize_start_end(-1), (1, 1)) - - def test__normalize_start_end_wo_end_w_start_ge_0(self): - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/100'}) - self.assertEqual(download._normalize_start_end(0), (0, 99)) - self.assertEqual(download._normalize_start_end(1), (1, 99)) - - def test__set_range_header_w_start_lt_0(self): - request = _Request() - download = self._make_one(_Stream()) - download._set_range_header(request, -1) - self.assertEqual(request.headers['range'], 'bytes=-1') - - def test__set_range_header_w_start_ge_0_wo_end(self): - request = _Request() - download = self._make_one(_Stream()) - download._set_range_header(request, 0) - self.assertEqual(request.headers['range'], 'bytes=0-') - - def test__set_range_header_w_start_ge_0_w_end(self): - request = _Request() - download = self._make_one(_Stream()) - download._set_range_header(request, 0, 1) - self.assertEqual(request.headers['range'], 'bytes=0-1') - - def test__compute_end_byte_w_start_lt_0_w_end(self): - download = self._make_one(_Stream()) - self.assertEqual(download._compute_end_byte(-1, 1), 1) - - def test__compute_end_byte_w_start_ge_0_wo_end_w_use_chunks(self): - CHUNK_SIZE = 5 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - self.assertEqual(download._compute_end_byte(0, use_chunks=True), 4) - - def test__compute_end_byte_w_start_ge_0_w_end_w_use_chunks(self): - CHUNK_SIZE = 5 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - self.assertEqual(download._compute_end_byte(0, 3, use_chunks=True), 3) - self.assertEqual(download._compute_end_byte(0, 5, use_chunks=True), 4) - - def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self): - CHUNK_SIZE = 50 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - download._set_total({'content-range': 'bytes 0-1/10'}) - 
self.assertEqual(download._compute_end_byte(0, 100, use_chunks=False), - 9) - self.assertEqual(download._compute_end_byte(0, 8, use_chunks=False), 8) - - def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self): - CHUNK_SIZE = 50 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - download._set_total({'content-range': 'bytes 0-1/10'}) - self.assertEqual(download._compute_end_byte(0, use_chunks=False), 9) - - def test__get_chunk_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - - with self.assertRaises(TransferInvalidError): - download._get_chunk(0, 10) - - def test__get_chunk(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - http = object() - download = self._make_one(_Stream()) - download._initialize(http, self.URL) - response = _makeResponse(http_client.OK) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - found = download._get_chunk(0, 10) - - self.assertIs(found, response) - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers['range'], 'bytes=0-10') - - def test__process_response_w_FORBIDDEN(self): - from google.cloud.streaming.exceptions import HttpError - from six.moves import http_client - - download = self._make_one(_Stream()) - response = _makeResponse(http_client.FORBIDDEN) - with self.assertRaises(HttpError): - download._process_response(response) - - def test__process_response_w_NOT_FOUND(self): - from google.cloud.streaming.exceptions import HttpError - from six.moves import http_client - - download = self._make_one(_Stream()) - response = _makeResponse(http_client.NOT_FOUND) - with self.assertRaises(HttpError): - download._process_response(response) - - def test__process_response_w_other_error(self): - from google.cloud.streaming.exceptions import TransferRetryError - from six.moves import http_client - - download = self._make_one(_Stream()) - response = _makeResponse(http_client.BAD_REQUEST) - with self.assertRaises(TransferRetryError): - download._process_response(response) - - def test__process_response_w_OK_wo_encoding(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - response = _makeResponse(http_client.OK, content='OK') - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, ['OK']) - self.assertEqual(download.progress, 2) - self.assertIsNone(download.encoding) - - def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - info = {'content-encoding': 'blah'} - response = _makeResponse(http_client.OK, info, 'PARTIAL') - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, ['PARTIAL']) - self.assertEqual(download.progress, 7) - self.assertEqual(download.encoding, 'blah') - - def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - response = _makeResponse( - http_client.REQUESTED_RANGE_NOT_SATISFIABLE) - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, []) - self.assertEqual(download.progress, 0) - 
self.assertIsNone(download.encoding) - - def test__process_response_w_NO_CONTENT(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - response = _makeResponse(status_code=http_client.NO_CONTENT) - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, ['']) - self.assertEqual(download.progress, 0) - self.assertIsNone(download.encoding) - - def test_get_range_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - download.get_range(0, 10) - - def test_get_range_wo_total_size_complete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - REQ_RANGE = 'bytes=0-%d' % (LEN,) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0, LEN) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_wo_total_size_wo_end(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - START = 5 - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,) - RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT[START:]) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(START) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT[START:]]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_total_size_partial(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - PARTIAL_LEN = 5 - REQ_RANGE = 'bytes=0-%d' % (PARTIAL_LEN,) - RESP_RANGE = 'bytes 0-%d/%d' % (PARTIAL_LEN, LEN,) - http = object() - stream = _Stream() - download = self._make_one(stream, total_size=LEN) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT[:PARTIAL_LEN]) - response.length = LEN - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0, PARTIAL_LEN) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, 
[CONTENT[:PARTIAL_LEN]]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_empty_chunk(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import TransferRetryError - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - START = 5 - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,) - RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(TransferRetryError): - download.get_range(START) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, ['']) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_total_size_wo_use_chunks(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - CHUNK_SIZE = 3 - REQ_RANGE = 'bytes=0-%d' % (LEN - 1,) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - download = self._make_one(stream, total_size=LEN, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0, use_chunks=False) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_multiple_chunks(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDE' - LEN = len(CONTENT) - CHUNK_SIZE = 3 - REQ_RANGE_1 = 'bytes=0-%d' % (CHUNK_SIZE - 1,) - RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN) - REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1) - RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info_1 = {'content-range': RESP_RANGE_1} - response_1 = _makeResponse(http_client.PARTIAL_CONTENT, info_1, - CONTENT[:CHUNK_SIZE]) - info_2 = {'content-range': RESP_RANGE_2} - response_2 = _makeResponse(http_client.OK, info_2, - CONTENT[CHUNK_SIZE:]) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0) - - self.assertTrue(len(requester._requested), 2) - request_1 = requester._requested[0][0] - self.assertEqual(request_1.headers, {'range': REQ_RANGE_1}) - request_2 = requester._requested[1][0] - self.assertEqual(request_2.headers, {'range': REQ_RANGE_2}) - self.assertEqual(stream._written, [b'ABC', b'DE']) - self.assertEqual(download.total_size, LEN) - - def test_stream_file_not_initialized(self): - from 
google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - - with self.assertRaises(TransferInvalidError): - download.stream_file() - - def test_stream_file_w_initial_response_complete(self): - from six.moves import http_client - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - stream = _Stream() - download = self._make_one(stream, total_size=LEN) - info = {'content-range': RESP_RANGE} - download._initial_response = _makeResponse( - http_client.OK, info, CONTENT) - http = object() - download._initialize(http, _Request.URL) - - download.stream_file() - - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - def test_stream_file_w_initial_response_incomplete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CHUNK_SIZE = 3 - CONTENT = b'ABCDEF' - LEN = len(CONTENT) - RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN,) - REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1) - RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN,) - stream = _Stream() - http = object() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - info_1 = {'content-range': RESP_RANGE_1} - download._initial_response = _makeResponse( - http_client.PARTIAL_CONTENT, info_1, CONTENT[:CHUNK_SIZE]) - info_2 = {'content-range': RESP_RANGE_2} - response_2 = _makeResponse( - http_client.OK, info_2, CONTENT[CHUNK_SIZE:]) - requester = _MakeRequest(response_2) - - download._initialize(http, _Request.URL) - - request = _Request() - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.stream_file() - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE_2}) - self.assertEqual(stream._written, - [CONTENT[:CHUNK_SIZE], CONTENT[CHUNK_SIZE:]]) - self.assertEqual(download.total_size, LEN) - - def test_stream_file_wo_initial_response_wo_total_size(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=0-%d' % (CHUNK_SIZE - 1) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - stream = _Stream() - http = object() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - download._initialize(http, _Request.URL) - - request = _Request() - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.stream_file() - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - -class Test_Upload(unittest.TestCase): - URL = "http://example.com/api" - MIME_TYPE = 'application/octet-stream' - UPLOAD_URL = 'http://example.com/upload/id=foobar' - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.transfer import Upload - - return Upload - - def _make_one(self, stream, mime_type=MIME_TYPE, *args, **kw): - return self._get_target_class()(stream, mime_type, *args, **kw) - - def test_ctor_defaults(self): - from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE - - 
stream = _Stream() - upload = self._make_one(stream) - self.assertIs(upload.stream, stream) - self.assertIsNone(upload._final_response) - self.assertIsNone(upload._server_chunk_granularity) - self.assertFalse(upload.complete) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertEqual(upload.progress, 0) - self.assertIsNone(upload.strategy) - self.assertIsNone(upload.total_size) - self.assertEqual(upload.chunksize, _DEFAULT_CHUNKSIZE) - - def test_ctor_w_kwds(self): - stream = _Stream() - CHUNK_SIZE = 123 - upload = self._make_one(stream, chunksize=CHUNK_SIZE) - self.assertIs(upload.stream, stream) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertEqual(upload.chunksize, CHUNK_SIZE) - - def test_from_file_w_nonesuch_file(self): - klass = self._get_target_class() - filename = '~nosuchuser/file.txt' - with self.assertRaises(OSError): - klass.from_file(filename) - - def test_from_file_wo_mimetype_w_unguessable_filename(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE' - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.unguessable') - with open(filename, 'wb') as fileobj: - fileobj.write(CONTENT) - with self.assertRaises(ValueError): - klass.from_file(filename) - - def test_from_file_wo_mimetype_w_guessable_filename(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.txt') - with open(filename, 'wb') as fileobj: - fileobj.write(CONTENT) - upload = klass.from_file(filename) - self.assertEqual(upload.mime_type, 'text/plain') - self.assertTrue(upload.auto_transfer) - self.assertEqual(upload.total_size, len(CONTENT)) - upload._stream.close() - - def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' - CHUNK_SIZE = 3 - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.unguessable') - with open(filename, 'wb') as fileobj: - fileobj.write(CONTENT) - upload = klass.from_file( - filename, - mime_type=self.MIME_TYPE, - auto_transfer=False, - chunksize=CHUNK_SIZE) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertFalse(upload.auto_transfer) - self.assertEqual(upload.total_size, len(CONTENT)) - self.assertEqual(upload.chunksize, CHUNK_SIZE) - upload._stream.close() - - def test_from_stream_wo_mimetype(self): - klass = self._get_target_class() - stream = _Stream() - with self.assertRaises(ValueError): - klass.from_stream(stream, mime_type=None) - - def test_from_stream_defaults(self): - klass = self._get_target_class() - stream = _Stream() - upload = klass.from_stream(stream, mime_type=self.MIME_TYPE) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertTrue(upload.auto_transfer) - self.assertIsNone(upload.total_size) - - def test_from_stream_explicit(self): - klass = self._get_target_class() - stream = _Stream() - SIZE = 10 - CHUNK_SIZE = 3 - upload = klass.from_stream( - stream, - mime_type=self.MIME_TYPE, - auto_transfer=False, - total_size=SIZE, - chunksize=CHUNK_SIZE) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertFalse(upload.auto_transfer) - self.assertEqual(upload.total_size, SIZE) - self.assertEqual(upload.chunksize, CHUNK_SIZE) - - def 
test_strategy_setter_invalid(self): - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.strategy = object() - with self.assertRaises(ValueError): - upload.strategy = 'unknown' - - def test_strategy_setter_SIMPLE_UPLOAD(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - - def test_strategy_setter_RESUMABLE_UPLOAD(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test_total_size_setter_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - SIZE = 123 - upload = self._make_one(_Stream) - http = object() - upload._initialize(http, _Request.URL) - with self.assertRaises(TransferInvalidError): - upload.total_size = SIZE - - def test_total_size_setter_not_initialized(self): - SIZE = 123 - upload = self._make_one(_Stream) - upload.total_size = SIZE - self.assertEqual(upload.total_size, SIZE) - - def test__set_default_strategy_w_existing_strategy(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - config = _Dummy( - resumable_path='/resumable/endpoint', - simple_multipart=True, - simple_path='/upload/endpoint', - ) - request = _Request() - upload = self._make_one(_Stream) - upload.strategy = RESUMABLE_UPLOAD - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_wo_resumable_path(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - config = _Dummy( - resumable_path=None, - simple_multipart=True, - simple_path='/upload/endpoint', - ) - request = _Request() - upload = self._make_one(_Stream()) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - - def test__set_default_strategy_w_total_size_gt_threshhold(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD_THRESHOLD - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - config = _UploadConfig() - request = _Request() - upload = self._make_one( - _Stream(), total_size=RESUMABLE_UPLOAD_THRESHOLD + 1) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_w_body_wo_multipart(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - config = _UploadConfig() - config.simple_multipart = False - request = _Request(body=CONTENT) - upload = self._make_one(_Stream(), total_size=len(CONTENT)) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - config = _UploadConfig() - config.simple_path = None - request = _Request(body=CONTENT) - upload = self._make_one(_Stream(), total_size=len(CONTENT)) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_w_body_w_multipart_w_simple_path(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - config = _UploadConfig() - request = _Request(body=CONTENT) - upload = self._make_one(_Stream(), 
total_size=len(CONTENT)) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - - def test_configure_request_w_total_size_gt_max_size(self): - MAX_SIZE = 1000 - config = _UploadConfig() - config.max_size = MAX_SIZE - request = _Request() - url_builder = _Dummy() - upload = self._make_one(_Stream(), total_size=MAX_SIZE + 1) - with self.assertRaises(ValueError): - upload.configure_request(config, request, url_builder) - - def test_configure_request_w_invalid_mimetype(self): - config = _UploadConfig() - config.accept = ('text/*',) - request = _Request() - url_builder = _Dummy() - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.configure_request(config, request, url_builder) - - def test_configure_request_w_simple_wo_body(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'CONTENT' - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = SIMPLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'media'}) - self.assertEqual(url_builder.relative_path, config.simple_path) - - self.assertEqual(request.headers, {'content-type': self.MIME_TYPE}) - self.assertEqual(request.body, CONTENT) - self.assertEqual(request.loggable_body, '') - - def test_configure_request_w_simple_w_body(self): - from google.cloud._helpers import _to_bytes - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'CONTENT' - BODY = b'BODY' - config = _UploadConfig() - request = _Request(body=BODY) - request.headers['content-type'] = 'text/plain' - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = SIMPLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'multipart'}) - self.assertEqual(url_builder.relative_path, config.simple_path) - - self.assertEqual(list(request.headers), ['content-type']) - ctype, boundary = [x.strip() - for x in request.headers['content-type'].split(';')] - self.assertEqual(ctype, 'multipart/related') - self.assertTrue(boundary.startswith('boundary="==')) - self.assertTrue(boundary.endswith('=="')) - - divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) - chunks = request.body.split(divider)[1:-1] # discard prolog / epilog - self.assertEqual(len(chunks), 2) - - parse_chunk = _email_chunk_parser() - text_msg = parse_chunk(chunks[0].strip()) - self.assertEqual(dict(text_msg._headers), - {'Content-Type': 'text/plain', - 'MIME-Version': '1.0'}) - self.assertEqual(text_msg._payload, BODY.decode('ascii')) - - app_msg = parse_chunk(chunks[1].strip()) - self.assertEqual(dict(app_msg._headers), - {'Content-Type': self.MIME_TYPE, - 'Content-Transfer-Encoding': 'binary', - 'MIME-Version': '1.0'}) - self.assertEqual(app_msg._payload, CONTENT.decode('ascii')) - self.assertTrue(b'' in request.loggable_body) - - def test_configure_request_w_resumable_wo_total_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'CONTENT' - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = RESUMABLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) - 
self.assertEqual(url_builder.relative_path, config.resumable_path) - - self.assertEqual(request.headers, - {'X-Upload-Content-Type': self.MIME_TYPE}) - - def test_configure_request_w_resumable_w_total_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'CONTENT' - LEN = len(CONTENT) - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.total_size = LEN - upload.strategy = RESUMABLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) - self.assertEqual(url_builder.relative_path, config.resumable_path) - - self.assertEqual(request.headers, - {'X-Upload-Content-Type': self.MIME_TYPE, - 'X-Upload-Content-Length': '%d' % (LEN,)}) - - def test_refresh_upload_state_w_simple_strategy(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - upload.refresh_upload_state() # no-op - - def test_refresh_upload_state_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - with self.assertRaises(TransferInvalidError): - upload.refresh_upload_state() - - def test_refresh_upload_state_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertTrue(upload.complete) - self.assertEqual(upload.progress, LEN) - self.assertEqual(stream.tell(), LEN) - self.assertIs(upload._final_response, response) - - def test_refresh_upload_state_w_CREATED(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.CREATED, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertTrue(upload.complete) - self.assertEqual(upload.progress, LEN) - self.assertEqual(stream.tell(), LEN) - self.assertIs(upload._final_response, response) - - def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud._testing import _Monkey - from google.cloud.streaming.transfer import 
RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - LAST = 5 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'range': '0-%d' % (LAST - 1,)} - response = _makeResponse(RESUME_INCOMPLETE, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertFalse(upload.complete) - self.assertEqual(upload.progress, LAST) - self.assertEqual(stream.tell(), LAST) - self.assertIsNot(upload._final_response, response) - - def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud._testing import _Monkey - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - response = _makeResponse(RESUME_INCOMPLETE, content=CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertFalse(upload.complete) - self.assertEqual(upload.progress, 0) - self.assertEqual(stream.tell(), 0) - self.assertIsNot(upload._final_response, response) - - def test_refresh_upload_state_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - response = _makeResponse(http_client.FORBIDDEN) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(HttpError): - upload.refresh_upload_state() - - def test__get_range_header_miss(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None) - self.assertIsNone(upload._get_range_header(response)) - - def test__get_range_header_w_Range(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None, {'Range': '123'}) - self.assertEqual(upload._get_range_header(response), '123') - - def test__get_range_header_w_range(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None, {'range': '123'}) - self.assertEqual(upload._get_range_header(response), '123') - - def test_initialize_upload_no_strategy(self): - request = _Request() - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_simple_w_http(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - upload.initialize_upload(request, http=object()) # no-op - - def test_initialize_upload_resumable_already_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - 
upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(None, self.URL) - with self.assertRaises(TransferInvalidError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - response = _makeResponse(http_client.FORBIDDEN) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - with self.assertRaises(HttpError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream(), auto_transfer=False) - upload.strategy = RESUMABLE_UPLOAD - info = {'location': self.UPLOAD_URL} - response = _makeResponse(http_client.OK, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - upload.initialize_upload(request, http=object()) - - self.assertIsNone(upload._server_chunk_granularity) - self.assertEqual(upload.url, self.UPLOAD_URL) - self.assertEqual(requester._responses, []) - self.assertEqual(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - request = _Request() - upload = self._make_one(_Stream(CONTENT), chunksize=1000) - upload.strategy = RESUMABLE_UPLOAD - info = {'X-Goog-Upload-Chunk-Granularity': '100', - 'location': self.UPLOAD_URL} - response = _makeResponse(http_client.OK, info) - chunk_response = _makeResponse(http_client.OK) - requester = _MakeRequest(response, chunk_response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.initialize_upload(request, http) - - self.assertEqual(upload._server_chunk_granularity, 100) - self.assertEqual(upload.url, self.UPLOAD_URL) - self.assertEqual(requester._responses, []) - self.assertEqual(len(requester._requested), 2) - self.assertIs(requester._requested[0][0], request) - chunk_request = requester._requested[1][0] - self.assertIsInstance(chunk_request, _Request) - self.assertEqual(chunk_request.url, self.UPLOAD_URL) - self.assertEqual(chunk_request.http_method, 'PUT') - self.assertEqual(chunk_request.body, CONTENT) - - def test__last_byte(self): - upload = self._make_one(_Stream()) - self.assertEqual(upload._last_byte('123-456'), 456) - - def test__validate_chunksize_wo__server_chunk_granularity(self): - upload = self._make_one(_Stream()) - upload._validate_chunksize(123) # no-op - - def test__validate_chunksize_w__server_chunk_granularity_miss(self): - upload = self._make_one(_Stream()) - upload._server_chunk_granularity = 100 - with self.assertRaises(ValueError): - upload._validate_chunksize(123) - - def 
test__validate_chunksize_w__server_chunk_granularity_hit(self): - upload = self._make_one(_Stream()) - upload._server_chunk_granularity = 100 - upload._validate_chunksize(400) - - def test_stream_file_w_simple_strategy(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - with self.assertRaises(ValueError): - upload.stream_file() - - def test_stream_file_w_use_chunks_invalid_chunk_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream(), chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 100 - with self.assertRaises(ValueError): - upload.stream_file(use_chunks=True) - - def test_stream_file_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream(), chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - with self.assertRaises(TransferInvalidError): - upload.stream_file() - - def test_stream_file_already_complete_w_unseekable_stream(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - http = object() - stream = object() - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(), response) - - def test_stream_file_already_complete_w_seekable_stream_unsynced(self): - from google.cloud.streaming.exceptions import CommunicationError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - with self.assertRaises(CommunicationError): - upload.stream_file() - - def test_stream_file_already_complete_wo_seekable_method_synced(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_already_complete_w_seekable_method_true_synced(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _StreamWithSeekableMethod(CONTENT, True) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_already_complete_w_seekable_method_false(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = 
b'ABCDEFGHIJ' - http = object() - stream = _StreamWithSeekableMethod(CONTENT, False) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_incomplete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, chunksize=6) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 6 - upload._initialize(http, self.UPLOAD_URL) - - info_1 = {'content-length': '0', 'range': 'bytes=0-5'} - response_1 = _makeResponse(RESUME_INCOMPLETE, info_1) - info_2 = {'content-length': '0', 'range': 'bytes=6-9'} - response_2 = _makeResponse(http_client.OK, info_2) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - response = upload.stream_file() - - self.assertIs(response, response_2) - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 2) - - request_1 = requester._requested[0][0] - self.assertEqual(request_1.url, self.UPLOAD_URL) - self.assertEqual(request_1.http_method, 'PUT') - self.assertEqual(request_1.headers, - {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request_1.body, CONTENT[:6]) - - request_2 = requester._requested[1][0] - self.assertEqual(request_2.url, self.UPLOAD_URL) - self.assertEqual(request_2.http_method, 'PUT') - self.assertEqual(request_2.headers, - {'Content-Range': 'bytes 6-9/10', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request_2.body, CONTENT[6:]) - - def test_stream_file_incomplete_w_transfer_error(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import CommunicationError - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, chunksize=6) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 6 - upload._initialize(http, self.UPLOAD_URL) - - info = { - 'content-length': '0', - 'range': 'bytes=0-4', # simulate error, s.b. 
'0-5' - } - response = _makeResponse(RESUME_INCOMPLETE, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(CommunicationError): - upload.stream_file() - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 1) - - request = requester._requested[0][0] - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.headers, - {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request.body, CONTENT[:6]) - - def test__send_media_request_wo_error(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - - CONTENT = b'ABCDEFGHIJ' - bytes_http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream) - upload.bytes_http = bytes_http - - headers = {'Content-Range': 'bytes 0-9/10', - 'Content-Type': self.MIME_TYPE} - request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) - info = {'content-length': '0', 'range': 'bytes=0-4'} - response = _makeResponse(RESUME_INCOMPLETE, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - upload._send_media_request(request, 9) - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 1) - used_request, used_http, _ = requester._requested[0] - self.assertIs(used_request, request) - self.assertIs(used_http, bytes_http) - self.assertEqual(stream.tell(), 4) - - def test__send_media_request_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - bytes_http = object() - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, self.UPLOAD_URL) - upload.bytes_http = bytes_http - - headers = {'Content-Range': 'bytes 0-9/10', - 'Content-Type': self.MIME_TYPE} - request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) - info_1 = {'content-length': '0', 'range': 'bytes=0-4'} - response_1 = _makeResponse(http_client.FORBIDDEN, info_1) - info_2 = {'Content-Length': '0', 'Range': 'bytes=0-4'} - response_2 = _makeResponse(RESUME_INCOMPLETE, info_2) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, Request=_Request, make_api_request=requester): - with self.assertRaises(HttpError): - upload._send_media_request(request, 9) - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 2) - first_request, first_http, _ = requester._requested[0] - self.assertIs(first_request, request) - self.assertIs(first_http, bytes_http) - second_request, second_http, _ = requester._requested[1] - self.assertEqual(second_request.url, self.UPLOAD_URL) - self.assertEqual(second_request.http_method, 'PUT') # ACK! 
- self.assertEqual(second_request.headers, - {'Content-Range': 'bytes */*'}) - self.assertIs(second_http, http) - - def test__send_media_body_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - upload = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - upload._send_media_body(0) - - def test__send_media_body_wo_total_size(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - http = object() - upload = self._make_one(_Stream()) - upload._initialize(http, _Request.URL) - with self.assertRaises(TransferInvalidError): - upload._send_media_body(0) - - def test__send_media_body_start_lt_total_size(self): - from google.cloud.streaming.stream_slice import StreamSlice - - SIZE = 1234 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_media_body(0) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), SIZE) - self.assertEqual(request.headers, - {'content-length': '%d' % (SIZE,), # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) - self.assertEqual(end, SIZE) - - def test__send_media_body_start_eq_total_size(self): - from google.cloud.streaming.stream_slice import StreamSlice - - SIZE = 1234 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_media_body(SIZE) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), 0) - self.assertEqual(request.headers, - {'content-length': '0', # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes */%d' % (SIZE,)}) - self.assertEqual(end, SIZE) - - def test__send_chunk_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - upload = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - upload._send_chunk(0) - - def test__send_chunk_wo_total_size_stream_exhausted(self): - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - http = object() - upload = self._make_one(_Stream(CONTENT), chunksize=1000) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - self.assertIsNone(upload.total_size) - - found = upload._send_chunk(0) - - self.assertIs(found, response) - self.assertEqual(upload.total_size, SIZE) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.body, CONTENT) - self.assertEqual(request.headers, - {'content-length': '%d' % SIZE, # speling! 
- 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) - self.assertEqual(end, SIZE) - - def test__send_chunk_wo_total_size_stream_not_exhausted(self): - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = SIZE - 5 - http = object() - upload = self._make_one(_Stream(CONTENT), chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - self.assertIsNone(upload.total_size) - - found = upload._send_chunk(0) - - self.assertIs(found, response) - self.assertIsNone(upload.total_size) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.body, CONTENT[:CHUNK_SIZE]) - expected_headers = { - 'content-length': '%d' % CHUNK_SIZE, # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/*' % (CHUNK_SIZE - 1,), - } - self.assertEqual(request.headers, expected_headers) - self.assertEqual(end, CHUNK_SIZE) - - def test__send_chunk_w_total_size_stream_not_exhausted(self): - from google.cloud.streaming.stream_slice import StreamSlice - - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = SIZE - 5 - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_chunk(0) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), CHUNK_SIZE) - expected_headers = { - 'content-length': '%d' % CHUNK_SIZE, # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, SIZE), - } - self.assertEqual(request.headers, expected_headers) - self.assertEqual(end, CHUNK_SIZE) - - def test__send_chunk_w_total_size_stream_exhausted(self): - from google.cloud.streaming.stream_slice import StreamSlice - - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = 1000 - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_chunk(SIZE) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), 0) - self.assertEqual(request.headers, - {'content-length': '0', # speling! 
- 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes */%d' % (SIZE,)}) - self.assertEqual(end, SIZE) - - -def _email_chunk_parser(): - import six - - if six.PY3: # pragma: NO COVER Python3 - from email.parser import BytesParser - - parser = BytesParser() - return parser.parsebytes - else: - from email.parser import Parser - - parser = Parser() - return parser.parsestr - - -class _Dummy(object): - def __init__(self, **kw): - self.__dict__.update(kw) - - -class _UploadConfig(object): - accept = ('*/*',) - max_size = None - resumable_path = '/resumable/endpoint' - simple_multipart = True - simple_path = '/upload/endpoint' - - -class _Stream(object): - _closed = False - - def __init__(self, to_read=b''): - import io - - self._written = [] - self._to_read = io.BytesIO(to_read) - - def write(self, to_write): - self._written.append(to_write) - - def seek(self, offset, whence=0): - self._to_read.seek(offset, whence) - - def read(self, size=None): - if size is not None: - return self._to_read.read(size) - return self._to_read.read() - - def tell(self): - return self._to_read.tell() - - def close(self): - self._closed = True - - -class _StreamWithSeekableMethod(_Stream): - - def __init__(self, to_read=b'', seekable=True): - super(_StreamWithSeekableMethod, self).__init__(to_read) - self._seekable = seekable - - def seekable(self): - return self._seekable - - -class _Request(object): - __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body') - URL = 'http://example.com/api' - - def __init__(self, url=URL, http_method='GET', body='', headers=None): - self.url = url - self.http_method = http_method - self.body = self.loggable_body = body - if headers is None: - headers = {} - self.headers = headers - - -class _MakeRequest(object): - - def __init__(self, *responses): - self._responses = list(responses) - self._requested = [] - - def __call__(self, http, request, **kw): - self._requested.append((request, http, kw)) - return self._responses.pop(0) - - -def _makeResponse(status_code, info=None, content='', - request_url=_Request.URL): - if info is None: - info = {} - return _Dummy(status_code=status_code, - info=info, - content=content, - length=len(content), - request_url=request_url) - - -class _MediaStreamer(object): - - _called_with = None - - def __init__(self, response): - self._response = response - - def __call__(self, request, end): - assert self._called_with is None - self._called_with = (request, end) - return self._response diff --git a/core/tests/unit/streaming/test_util.py b/core/tests/unit/streaming/test_util.py deleted file mode 100644 index 4da788182cb9..000000000000 --- a/core/tests/unit/streaming/test_util.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
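The streaming-transfer tests deleted above never touch the network: ``_MakeRequest`` queues canned responses and records every request it sees, while ``_Monkey`` (from ``google.cloud._testing``) temporarily swaps the module-under-test's ``make_api_request`` (and often ``Request``) for those doubles. As a minimal sketch of how the pieces compose — assuming it is dropped into this same test module so the local helpers are in scope::

    from google.cloud._testing import _Monkey
    from google.cloud.streaming import transfer as MUT

    # Queue one canned response; the stub pops it per call.
    response = _makeResponse(200, {'content-range': 'bytes 0-9/10'})
    requester = _MakeRequest(response)

    with _Monkey(MUT, make_api_request=requester):
        # Anything calling transfer.make_api_request now hits the stub.
        got = MUT.make_api_request(object(), 'fake-request')

    assert got is response
    # The stub recorded (request, http, kwargs) for later assertions.
    assert requester._requested[0][0] == 'fake-request'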
- -import unittest - - -class Test_calculate_wait_for_retry(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.util import calculate_wait_for_retry - - return calculate_wait_for_retry(*args, **kw) - - def test_w_negative_jitter_lt_max_wait(self): - import random - from google.cloud._testing import _Monkey - - with _Monkey(random, uniform=lambda lower, upper: lower): - self.assertEqual(self._call_fut(1), 1.5) - - def test_w_positive_jitter_gt_max_wait(self): - import random - from google.cloud._testing import _Monkey - - with _Monkey(random, uniform=lambda lower, upper: upper): - self.assertEqual(self._call_fut(4), 20) - - -class Test_acceptable_mime_type(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.util import acceptable_mime_type - - return acceptable_mime_type(*args, **kw) - - def test_pattern_wo_slash(self): - with self.assertRaises(ValueError) as err: - self._call_fut(['text/*'], 'BOGUS') - self.assertEqual( - err.exception.args, - ('Invalid MIME type: "BOGUS"',)) - - def test_accept_pattern_w_semicolon(self): - with self.assertRaises(ValueError) as err: - self._call_fut(['text/*;charset=utf-8'], 'text/plain') - self.assertEqual( - err.exception.args, - ('MIME patterns with parameter unsupported: ' - '"text/*;charset=utf-8"',)) - - def test_miss(self): - self.assertFalse(self._call_fut(['image/*'], 'text/plain')) - - def test_hit(self): - self.assertTrue(self._call_fut(['text/*'], 'text/plain')) diff --git a/core/tests/unit/test__helpers.py b/core/tests/unit/test__helpers.py index fcd47f7535bc..f7ba1b2c109f 100644 --- a/core/tests/unit/test__helpers.py +++ b/core/tests/unit/test__helpers.py @@ -554,6 +554,35 @@ def test_it(self): self.assertEqual(self._call_fut(timestamp), dt_stamp) +class Test__from_any_pb(unittest.TestCase): + + def _call_fut(self, pb_type, any_pb): + from google.cloud._helpers import _from_any_pb + + return _from_any_pb(pb_type, any_pb) + + def test_success(self): + from google.protobuf import any_pb2 + from google.type import date_pb2 + + in_message = date_pb2.Date(year=1990) + in_message_any = any_pb2.Any() + in_message_any.Pack(in_message) + out_message = self._call_fut(date_pb2.Date, in_message_any) + self.assertEqual(in_message, out_message) + + def test_failure(self, ): + from google.protobuf import any_pb2 + from google.type import date_pb2 + from google.type import timeofday_pb2 + + in_message = any_pb2.Any() + in_message.Pack(date_pb2.Date(year=1990)) + + with self.assertRaises(TypeError): + self._call_fut(timeofday_pb2.TimeOfDay, in_message) + + class Test__pb_timestamp_to_rfc3339(unittest.TestCase): def _call_fut(self, timestamp): diff --git a/core/tests/unit/test__http.py b/core/tests/unit/test__http.py index 1226042b5859..22df11566811 100644 --- a/core/tests/unit/test__http.py +++ b/core/tests/unit/test__http.py @@ -94,12 +94,15 @@ def test_build_api_url_no_extra_query_params(self): self.assertEqual(conn.build_api_url('/foo'), URI) def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit client = object() conn = self._make_mock_one(client) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) + uri = conn.build_api_url('/foo', { + 'bar': 'baz', + 'qux': ['quux', 'corge'] + }) scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) @@ -111,8 +114,9 @@ def 
test_build_api_url_w_extra_query_params(self): 'foo', ]) self.assertEqual(path, PATH) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') + parms = dict(parse_qs(qs)) + self.assertEqual(parms['bar'], ['baz']) + self.assertEqual(parms['qux'], ['quux', 'corge']) def test__make_request_no_data_no_content_type_no_headers(self): http = _Http( @@ -222,7 +226,7 @@ def test_api_request_wo_json_expected(self): b'CONTENT') def test_api_request_w_query_params(self): - from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit http = _Http( @@ -231,7 +235,10 @@ def test_api_request_w_query_params(self): ) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) + self.assertEqual(conn.api_request('GET', '/', { + 'foo': 'bar', + 'baz': ['qux', 'quux'] + }), {}) self.assertEqual(http._called_with['method'], 'GET') uri = http._called_with['uri'] scheme, netloc, path, qs, _ = urlsplit(uri) @@ -244,8 +251,9 @@ def test_api_request_w_query_params(self): '', ]) self.assertEqual(path, PATH) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['foo'], 'bar') + parms = dict(parse_qs(qs)) + self.assertEqual(parms['foo'], ['bar']) + self.assertEqual(parms['baz'], ['qux', 'quux']) self.assertIsNone(http._called_with['body']) expected_headers = { 'Accept-Encoding': 'gzip', diff --git a/core/tests/unit/test_client.py b/core/tests/unit/test_client.py index 14eac68abee3..25667712c69a 100644 --- a/core/tests/unit/test_client.py +++ b/core/tests/unit/test_client.py @@ -59,37 +59,31 @@ def test_unpickleable(self): with self.assertRaises(pickle.PicklingError): pickle.dumps(client_obj) - def test_ctor_defaults(self): - from google.cloud._testing import _Monkey - from google.cloud import client - - CREDENTIALS = _make_credentials() - FUNC_CALLS = [] - - def mock_get_credentials(): - FUNC_CALLS.append('get_credentials') - return CREDENTIALS + def test_constructor_defaults(self): + credentials = _make_credentials() - with _Monkey(client, get_credentials=mock_get_credentials): + patch = mock.patch( + 'google.auth.default', return_value=(credentials, None)) + with patch as default: client_obj = self._make_one() - self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) - self.assertEqual(FUNC_CALLS, ['get_credentials']) + default.assert_called_once_with() - def test_ctor_explicit(self): - CREDENTIALS = _make_credentials() - HTTP = object() - client_obj = self._make_one(credentials=CREDENTIALS, _http=HTTP) + def test_constructor_explicit(self): + credentials = _make_credentials() + http = mock.sentinel.http + client_obj = self._make_one(credentials=credentials, _http=http) - self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIs(client_obj._http_internal, HTTP) + self.assertIs(client_obj._credentials, credentials) + self.assertIs(client_obj._http_internal, http) - def test_ctor_bad_credentials(self): - CREDENTIALS = object() + def test_constructor_bad_credentials(self): + credentials = mock.sentinel.credentials with self.assertRaises(ValueError): - self._make_one(credentials=CREDENTIALS) + self._make_one(credentials=credentials) def test_from_service_account_json(self): from google.cloud import _helpers @@ -162,34 +156,27 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def 
test_ctor_defaults(self): - from google.cloud._testing import _Monkey - from google.cloud import client - - PROJECT = 'PROJECT' - CREDENTIALS = _make_credentials() - FUNC_CALLS = [] - - def mock_determine_proj(project): - FUNC_CALLS.append((project, '_determine_default_project')) - return PROJECT + def test_constructor_defaults(self): + credentials = _make_credentials() + patch1 = mock.patch( + 'google.auth.default', return_value=(credentials, None)) - def mock_get_credentials(): - FUNC_CALLS.append('get_credentials') - return CREDENTIALS + project = 'prahj-ekt' + patch2 = mock.patch( + 'google.cloud.client._determine_default_project', + return_value=project) - with _Monkey(client, get_credentials=mock_get_credentials, - _determine_default_project=mock_determine_proj): - client_obj = self._make_one() + with patch1 as default: + with patch2 as _determine_default_project: + client_obj = self._make_one() - self.assertEqual(client_obj.project, PROJECT) - self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertEqual(client_obj.project, project) + self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) - self.assertEqual( - FUNC_CALLS, - [(None, '_determine_default_project'), 'get_credentials']) + default.assert_called_once_with() + _determine_default_project.assert_called_once_with(None) - def test_ctor_missing_project(self): + def test_constructor_missing_project(self): from google.cloud._testing import _Monkey from google.cloud import client @@ -204,7 +191,7 @@ def mock_determine_proj(project): self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) - def test_ctor_w_invalid_project(self): + def test_constructor_w_invalid_project(self): CREDENTIALS = _make_credentials() HTTP = object() with self.assertRaises(ValueError): @@ -227,11 +214,11 @@ def _explicit_ctor_helper(self, project): self.assertIs(client_obj._credentials, CREDENTIALS) self.assertIs(client_obj._http_internal, HTTP) - def test_ctor_explicit_bytes(self): + def test_constructor_explicit_bytes(self): PROJECT = b'PROJECT' self._explicit_ctor_helper(PROJECT) - def test_ctor_explicit_unicode(self): + def test_constructor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) diff --git a/core/tests/unit/test_credentials.py b/core/tests/unit/test_credentials.py deleted file mode 100644 index 53370a061494..000000000000 --- a/core/tests/unit/test_credentials.py +++ /dev/null @@ -1,234 +0,0 @@ -# Copyright 2014 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
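The deletion here retires ``core/tests/unit/test_credentials.py`` along with the ``google.cloud.credentials`` helpers it covered; the rewritten client tests above now patch ``google.auth.default`` directly instead. Judging from the ``Test_get_credentials`` case that follows, the retired ``get_credentials`` helper was a thin wrapper — roughly (an inferred sketch, not the verbatim implementation)::

    import google.auth

    def get_credentials():
        # google.auth.default() returns (credentials, project); the
        # wrapper discarded the project and returned the credentials.
        credentials, _project = google.auth.default()
        return credentials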
- -import unittest - -import mock - - -class Test_get_credentials(unittest.TestCase): - - def _call_fut(self): - from google.cloud import credentials - - return credentials.get_credentials() - - def test_it(self): - with mock.patch('google.auth.default', autospec=True) as default: - default.return_value = ( - mock.sentinel.credentials, mock.sentinel.project) - found = self._call_fut() - - self.assertIs(found, mock.sentinel.credentials) - default.assert_called_once_with() - - -class Test_generate_signed_url(unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.credentials import generate_signed_url - - return generate_signed_url(*args, **kwargs) - - def _generate_helper(self, response_type=None, response_disposition=None, - generation=None): - import base64 - from six.moves.urllib.parse import parse_qs - from six.moves.urllib.parse import urlsplit - import google.auth.credentials - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - ENDPOINT = 'http://api.example.com' - RESOURCE = '/name/path' - SIGNED = base64.b64encode(b'DEADBEEF') - CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) - CREDENTIALS.signer_email = 'service@example.com' - - def _get_signed_query_params(*args): - credentials, expiration = args[:2] - return { - 'GoogleAccessId': credentials.signer_email, - 'Expires': str(expiration), - 'Signature': SIGNED, - } - - with _Monkey(MUT, _get_signed_query_params=_get_signed_query_params): - url = self._call_fut(CREDENTIALS, RESOURCE, 1000, - api_access_endpoint=ENDPOINT, - response_type=response_type, - response_disposition=response_disposition, - generation=generation) - - scheme, netloc, path, qs, frag = urlsplit(url) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'api.example.com') - self.assertEqual(path, RESOURCE) - params = parse_qs(qs) - # In Py3k, parse_qs gives us text values: - self.assertEqual(params.pop('Signature'), [SIGNED.decode('ascii')]) - self.assertEqual(params.pop('Expires'), ['1000']) - self.assertEqual(params.pop('GoogleAccessId'), - [CREDENTIALS.signer_email]) - if response_type is not None: - self.assertEqual(params.pop('response-content-type'), - [response_type]) - if response_disposition is not None: - self.assertEqual(params.pop('response-content-disposition'), - [response_disposition]) - if generation is not None: - self.assertEqual(params.pop('generation'), [generation]) - # Make sure we have checked them all. 
- self.assertEqual(len(params), 0) - self.assertEqual(frag, '') - - def test_w_expiration_int(self): - self._generate_helper() - - def test_w_custom_fields(self): - response_type = 'text/plain' - response_disposition = 'attachment; filename=blob.png' - generation = '123' - self._generate_helper(response_type=response_type, - response_disposition=response_disposition, - generation=generation) - - -class Test_generate_signed_url_exception(unittest.TestCase): - def test_with_google_credentials(self): - import time - import google.auth.credentials - from google.cloud.credentials import generate_signed_url - - RESOURCE = '/name/path' - - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - expiration = int(time.time() + 5) - self.assertRaises(AttributeError, generate_signed_url, credentials, - resource=RESOURCE, expiration=expiration) - - -class Test__get_signed_query_params(unittest.TestCase): - - def _call_fut(self, credentials, expiration, string_to_sign): - from google.cloud.credentials import _get_signed_query_params - - return _get_signed_query_params(credentials, expiration, - string_to_sign) - - def test_it(self): - import base64 - import google.auth.credentials - - SIG_BYTES = b'DEADBEEF' - ACCOUNT_NAME = mock.sentinel.service_account_email - CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) - CREDENTIALS.signer_email = ACCOUNT_NAME - CREDENTIALS.sign_bytes.return_value = SIG_BYTES - EXPIRATION = 100 - STRING_TO_SIGN = 'dummy_signature' - result = self._call_fut(CREDENTIALS, EXPIRATION, - STRING_TO_SIGN) - - self.assertEqual(result, { - 'GoogleAccessId': ACCOUNT_NAME, - 'Expires': str(EXPIRATION), - 'Signature': base64.b64encode(b'DEADBEEF'), - }) - CREDENTIALS.sign_bytes.assert_called_once_with(STRING_TO_SIGN) - - -class Test__get_expiration_seconds(unittest.TestCase): - - def _call_fut(self, expiration): - from google.cloud.credentials import _get_expiration_seconds - - return _get_expiration_seconds(expiration) - - def _utc_seconds(self, when): - import calendar - - return int(calendar.timegm(when.timetuple())) - - def test_w_invalid(self): - self.assertRaises(TypeError, self._call_fut, object()) - self.assertRaises(TypeError, self._call_fut, None) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), 123) - - def test_w_long(self): - try: - long - except NameError: # pragma: NO COVER Py3K - pass - else: - self.assertEqual(self._call_fut(long(123)), 123) - - def test_w_naive_datetime(self): - import datetime - - expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(expiration_no_tz) - self.assertEqual(self._call_fut(expiration_no_tz), utc_seconds) - - def test_w_utc_datetime(self): - import datetime - from google.cloud._helpers import UTC - - expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) - utc_seconds = self._utc_seconds(expiration_utc) - self.assertEqual(self._call_fut(expiration_utc), utc_seconds) - - def test_w_other_zone_datetime(self): - import datetime - from google.cloud._helpers import _UTC - - class CET(_UTC): - _tzname = 'CET' - _utcoffset = datetime.timedelta(hours=1) - - zone = CET() - expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) - utc_seconds = self._utc_seconds(expiration_other) - cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC - self.assertEqual(self._call_fut(expiration_other), cet_seconds) - - def test_w_timedelta_seconds(self): - import datetime - from google.cloud._testing import _Monkey - from google.cloud import 
credentials as MUT - - dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(dummy_utcnow) - expiration_as_delta = datetime.timedelta(seconds=10) - - with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._call_fut(expiration_as_delta) - - self.assertEqual(result, utc_seconds + 10) - - def test_w_timedelta_days(self): - import datetime - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(dummy_utcnow) - expiration_as_delta = datetime.timedelta(days=1) - - with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._call_fut(expiration_as_delta) - - self.assertEqual(result, utc_seconds + 86400) diff --git a/core/tests/unit/test_iam.py b/core/tests/unit/test_iam.py index d076edd6eba9..4a17c61ce173 100644 --- a/core/tests/unit/test_iam.py +++ b/core/tests/unit/test_iam.py @@ -200,7 +200,6 @@ def test_from_api_repr_complete(self): {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } - empty = frozenset() klass = self._get_target_class() policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') diff --git a/datastore/MANIFEST.in b/datastore/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/datastore/MANIFEST.in +++ b/datastore/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/datastore/README.rst b/datastore/README.rst index d913abc7821f..89ba561baed3 100644 --- a/datastore/README.rst +++ b/datastore/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Datastore - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -44,7 +44,7 @@ queries, and eventual consistency for all other queries. See the ``google-cloud-python`` API `datastore documentation`_ to learn how to interact with the Cloud Datastore using this Client Library. -.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html +.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html See the `official Google Cloud Datastore documentation`_ for more details on how to activate Cloud Datastore for your project. @@ -67,6 +67,6 @@ how to activate Cloud Datastore for your project. print(result) .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg - :target: https://pypi.python.org/pypi/google-cloud-datastore + :target: https://pypi.org/project/google-cloud-datastore/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg - :target: https://pypi.python.org/pypi/google-cloud-datastore + :target: https://pypi.org/project/google-cloud-datastore/ diff --git a/datastore/google/cloud/datastore/key.py b/datastore/google/cloud/datastore/key.py index 166a5afde46b..f1733f8f5d8e 100644 --- a/datastore/google/cloud/datastore/key.py +++ b/datastore/google/cloud/datastore/key.py @@ -304,7 +304,8 @@ def to_legacy_urlsafe(self): This is intended to work with the "legacy" representation of a datastore "Key" used within Google App Engine (a so-called "Reference"). The returned string can be used as the ``urlsafe`` - argument to ``ndb.Key(urlsafe=...)``. + argument to ``ndb.Key(urlsafe=...)``. The base64 encoded values + will have padding removed. :rtype: bytes :returns: A bytestring containing the key encoded as URL-safe base64. @@ -315,7 +316,7 @@ def to_legacy_urlsafe(self): name_space=self.namespace, ) raw_bytes = reference.SerializeToString() - return base64.urlsafe_b64encode(raw_bytes) + return base64.urlsafe_b64encode(raw_bytes).strip(b'=') @classmethod def from_legacy_urlsafe(cls, urlsafe): @@ -334,6 +335,8 @@ def from_legacy_urlsafe(cls, urlsafe): :returns: The key corresponding to ``urlsafe``. """ urlsafe = _to_bytes(urlsafe, encoding='ascii') + padding = b'=' * (-len(urlsafe) % 4) + urlsafe += padding raw_bytes = base64.urlsafe_b64decode(urlsafe) reference = _app_engine_key_pb2.Reference() diff --git a/datastore/nox.py b/datastore/nox.py index 7894b1ae0b23..f93b02944631 100644 --- a/datastore/nox.py +++ b/datastore/nox.py @@ -30,15 +30,25 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.datastore', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.datastore', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) @@ -49,11 +59,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -70,7 +83,7 @@ def doctests(session): # Sanity check: Only run doctests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Doctests run against Python 3.6 only. 
# It is difficult to make doctests run against both Python 2 and Python 3 @@ -90,21 +103,32 @@ def doctests(session): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/datastore') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/datastore/pylint.config.py b/datastore/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/datastore/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/datastore/setup.py b/datastore/setup.py index cc82802315ae..692dd109a481 100644 --- a/datastore/setup.py +++ b/datastore/setup.py @@ -27,7 +27,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'google-gax>=0.15.7, <0.16dev', 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-datastore', - version='1.0.0', + version='1.1.0', description='Python Client for Google Cloud Datastore', long_description=README, namespace_packages=[ diff --git a/datastore/tests/unit/test_client.py b/datastore/tests/unit/test_client.py index 9824e06b73ad..a03bbe8b710f 100644 --- a/datastore/tests/unit/test_client.py +++ b/datastore/tests/unit/test_client.py @@ -148,22 +148,16 @@ def test_constructor_w_implicit_inputs(self): other = 'other' creds = _make_credentials() - default_called = [] - - def fallback_mock(project): - default_called.append(project) - return project or other klass = self._get_target_class() patch1 = mock.patch( 'google.cloud.datastore.client._determine_default_project', - new=fallback_mock) + return_value=other) patch2 = mock.patch( - 'google.cloud.client.get_credentials', - return_value=creds) + 'google.auth.default', return_value=(creds, None)) - with patch1: - with patch2: + with patch1 as _determine_default_project: + with patch2 as default: client = klass() self.assertEqual(client.project, other) @@ -174,7 +168,9 @@ def fallback_mock(project): self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) - self.assertEqual(default_called, [None]) + + default.assert_called_once_with() + _determine_default_project.assert_called_once_with(None) def test_constructor_w_explicit_inputs(self): from google.cloud.datastore.client import _DATASTORE_BASE_URL diff --git a/datastore/tests/unit/test_key.py b/datastore/tests/unit/test_key.py index 5b89e146254d..4fb7b89911b2 100644 --- a/datastore/tests/unit/test_key.py +++ b/datastore/tests/unit/test_key.py @@ -26,12 +26,15 @@ class TestKey(unittest.TestCase): # 'Parent', 59, 'Child', 'Feather', # namespace='space', app='s~sample-app') # urlsafe = key.urlsafe() - _URLSAFE_EXAMPLE = ( + _URLSAFE_EXAMPLE1 = ( b'agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ' b'WF0aGVyDKIBBXNwYWNl') - _URLSAFE_APP = 's~sample-app' - _URLSAFE_NAMESPACE = 'space' - _URLSAFE_FLAT_PATH = ('Parent', 59, 'Child', 'Feather') + _URLSAFE_APP1 = 's~sample-app' + _URLSAFE_NAMESPACE1 = 'space' + _URLSAFE_FLAT_PATH1 = ('Parent', 59, 'Child', 'Feather') + _URLSAFE_EXAMPLE2 = b'agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA' + _URLSAFE_APP2 = 's~fire' + _URLSAFE_FLAT_PATH2 = ('Kind', 'Thing') @staticmethod def _get_target_class(): @@ -388,25 +391,45 @@ def test_to_protobuf_w_no_kind(self): def test_to_legacy_urlsafe(self): key = self._make_one( - *self._URLSAFE_FLAT_PATH, - project=self._URLSAFE_APP, - namespace=self._URLSAFE_NAMESPACE) + *self._URLSAFE_FLAT_PATH1, + project=self._URLSAFE_APP1, + namespace=self._URLSAFE_NAMESPACE1) # NOTE: ``key.project`` is somewhat "invalid" but that is OK. urlsafe = key.to_legacy_urlsafe() - self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE) + self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE1) + + def test_to_legacy_urlsafe_strip_padding(self): + key = self._make_one( + *self._URLSAFE_FLAT_PATH2, + project=self._URLSAFE_APP2) + # NOTE: ``key.project`` is somewhat "invalid" but that is OK. 
+ urlsafe = key.to_legacy_urlsafe() + self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE2) + # Make sure it started with base64 padding. + self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) def test_from_legacy_urlsafe(self): klass = self._get_target_class() - key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE) + key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE1) - self.assertEqual('s~' + key.project, self._URLSAFE_APP) - self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE) - self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH) + self.assertEqual('s~' + key.project, self._URLSAFE_APP1) + self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE1) + self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH1) # Also make sure we didn't accidentally set the parent. self.assertIsNone(key._parent) self.assertIsNotNone(key.parent) self.assertIs(key._parent, key.parent) + def test_from_legacy_urlsafe_needs_padding(self): + klass = self._get_target_class() + # Make sure it will have base64 padding added. + self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0) + key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE2) + + self.assertEqual('s~' + key.project, self._URLSAFE_APP2) + self.assertIsNone(key.namespace) + self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH2) + def test_is_partial_no_name_or_id(self): key = self._make_one('KIND', project=self._DEFAULT_PROJECT) self.assertTrue(key.is_partial) diff --git a/datastore/tests/unit/test_query.py b/datastore/tests/unit/test_query.py index b361ec25a42f..26c1b6cc0831 100644 --- a/datastore/tests/unit/test_query.py +++ b/datastore/tests/unit/test_query.py @@ -550,21 +550,14 @@ def _call_fut(self, iterator, entity_pb): return _item_to_entity(iterator, entity_pb) def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import helpers - - result = object() - entities = [] - - def mocked(entity_pb): - entities.append(entity_pb) - return result - - entity_pb = object() - with _Monkey(helpers, entity_from_protobuf=mocked): - self.assertIs(result, self._call_fut(None, entity_pb)) - - self.assertEqual(entities, [entity_pb]) + entity_pb = mock.sentinel.entity_pb + patch = mock.patch( + 'google.cloud.datastore.helpers.entity_from_protobuf') + with patch as entity_from_protobuf: + result = self._call_fut(None, entity_pb) + self.assertIs(result, entity_from_protobuf.return_value) + + entity_from_protobuf.assert_called_once_with(entity_pb) class Test__pb_from_query(unittest.TestCase): diff --git a/dns/MANIFEST.in b/dns/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/dns/MANIFEST.in +++ b/dns/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/dns/README.rst b/dns/README.rst index 2e290780b3ed..e5882fd0adc2 100644 --- a/dns/README.rst +++ b/dns/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud DNS - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/dns-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/dns/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. 
_Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -42,6 +42,6 @@ See the ``google-cloud-python`` API DNS `Documentation`_ to learn how to manage DNS records using this Client Library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dns.svg - :target: https://pypi.python.org/pypi/google-cloud-dns + :target: https://pypi.org/project/google-cloud-dns/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dns.svg - :target: https://pypi.python.org/pypi/google-cloud-dns + :target: https://pypi.org/project/google-cloud-dns/ diff --git a/dns/nox.py b/dns/nox.py index 0fc4850053ec..0fd18ca931e9 100644 --- a/dns/nox.py +++ b/dns/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -44,21 +47,32 @@ def unit_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/dns') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/dns/pylint.config.py b/dns/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/dns/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/dns/setup.py b/dns/setup.py index 72c14abc2db3..62af1fe9ddc5 100644 --- a/dns/setup.py +++ b/dns/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,12 +51,12 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', ] setup( name='google-cloud-dns', - version='0.24.0', + version='0.25.0', description='Python Client for Google Cloud DNS', long_description=README, namespace_packages=[ diff --git a/docs/_static/images/gcp-logo-32x32.png b/docs/_static/images/gcp-logo-32x32.png deleted file mode 100644 index 1dcafb042990..000000000000 Binary files a/docs/_static/images/gcp-logo-32x32.png and /dev/null differ diff --git a/docs/_static/images/gcp-logo.png b/docs/_static/images/gcp-logo.png deleted file mode 100644 index eb7f9d405fdc..000000000000 Binary files a/docs/_static/images/gcp-logo.png and /dev/null differ diff --git a/docs/bigquery/usage.rst b/docs/bigquery/usage.rst index aaa63e91b679..77252e210ccd 100644 --- a/docs/bigquery/usage.rst +++ b/docs/bigquery/usage.rst @@ -307,7 +307,7 @@ Retrieve the results: .. code-block:: python >>> results = job.results() - >>> rows, total_count, token = query.fetch_data() # API requet + >>> rows, total_count, token = query.fetch_data() # API request >>> while True: ... do_something_with(rows) ... if token is None: diff --git a/docs/bigtable/usage.rst b/docs/bigtable/usage.rst index 421b2426f8cf..f5df56fdee74 100644 --- a/docs/bigtable/usage.rst +++ b/docs/bigtable/usage.rst @@ -40,4 +40,4 @@ In the hierarchy of API concepts .. _Google Cloud Bigtable: https://cloud.google.com/bigtable/docs/ .. _gRPC: http://www.grpc.io/ -.. _grpcio: https://pypi.python.org/pypi/grpcio +.. _grpcio: https://pypi.org/project/grpcio/ diff --git a/docs/core/modules.rst b/docs/core/modules.rst index 195a79c5abb2..a1cdbc456de5 100644 --- a/docs/core/modules.rst +++ b/docs/core/modules.rst @@ -9,13 +9,6 @@ Base Client :show-inheritance: :inherited-members: -Credentials Helpers -~~~~~~~~~~~~~~~~~~~ - -.. automodule:: google.cloud.credentials - :members: - :show-inheritance: - Exceptions ~~~~~~~~~~ diff --git a/docs/index.rst b/docs/index.rst index 3402e3e629fe..ee47a2ac378f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -12,7 +12,7 @@ resource-manager/api runtimeconfig/usage spanner/usage - speech/usage + speech/index error-reporting/usage monitoring/usage logging/usage @@ -37,7 +37,7 @@ Cloud Datastore `Google Cloud Datastore`_ is a fully managed, schemaless database for storing non-relational data. -.. _Google Cloud Datastore: https://developers.google.com/datastore/ +.. _Google Cloud Datastore: https://cloud.google.com/datastore/ .. code-block:: python @@ -56,7 +56,7 @@ Cloud Storage `Google Cloud Storage`_ allows you to store data on Google infrastructure. -.. _Google Cloud Storage: https://developers.google.com/storage/ +.. _Google Cloud Storage: https://cloud.google.com/storage/ .. 
code-block:: python @@ -73,4 +73,4 @@ Resources * `GitHub `__ * `Issues `__ * `Stack Overflow `__ -* `PyPI `__ +* `PyPI `__ diff --git a/docs/json/json/home.html b/docs/json/json/home.html deleted file mode 100644 index 762625468340..000000000000 --- a/docs/json/json/home.html +++ /dev/null @@ -1,127 +0,0 @@ -
-[deleted file contents: the docs-site home page for google-cloud-python;
- its Angular/HTML markup was lost in extraction. Recoverable content: the
- tagline ("Google Cloud Client Library for Python - an idiomatic,
- intuitive, and natural way for Python developers to integrate with Google
- Cloud Platform services, like Cloud Datastore and Cloud Storage"), the
- install command "$ pip install --upgrade google-cloud", a "What is it?"
- overview, a "Retrieve Datastore Entities" sample (from google.cloud
- import datastore; client = datastore.Client(); product_key =
- client.key('Product', 123); print(client.get(product_key))), an
- "Examples" section, and an FAQ on how google-cloud-python relates to the
- google-cloud command-line tool and to the Google APIs Python Client.]
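A note on the ``datastore/google/cloud/datastore/key.py`` hunk earlier in this
patch: ``to_legacy_urlsafe()`` now strips the trailing ``=`` padding so its
output matches App Engine's legacy ``urlsafe`` strings, and
``from_legacy_urlsafe()`` restores that padding before decoding, since
``base64.urlsafe_b64decode`` requires input whose length is a multiple of
four. A minimal sketch of the round trip (the payload here is an arbitrary
stand-in, not a real serialized ``Reference``):

.. code-block:: python

    import base64

    raw_bytes = b'any serialized payload'

    # Encode, then drop the trailing '=' padding, as to_legacy_urlsafe() does.
    urlsafe = base64.urlsafe_b64encode(raw_bytes).strip(b'=')

    # -len(urlsafe) % 4 is exactly the number of '=' characters needed to
    # bring the length back up to a multiple of four, mirroring
    # from_legacy_urlsafe().
    padding = b'=' * (-len(urlsafe) % 4)
    assert base64.urlsafe_b64decode(urlsafe + padding) == raw_bytes

This is also why ``test_to_legacy_urlsafe_strip_padding`` asserts that the
expected string's length is *not* a multiple of four: it guarantees the test
exercises the stripped-padding case.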
diff --git a/docs/json/json/master/index.json b/docs/json/json/master/index.json deleted file mode 100644 index 1526595856f1..000000000000 --- a/docs/json/json/master/index.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "description": "

google-cloud-python: Google Cloud API access in idiomatic Python.
", - "examples": [ - "import google.cloud" - ], - "id": "google.cloud.__init__", - "methods": [], - "name": "__Init__", - "source": "/google/cloud/__init__.py" -} diff --git a/docs/json/json/master/overview.html b/docs/json/json/master/overview.html deleted file mode 100644 index 08d4b32fabe7..000000000000 --- a/docs/json/json/master/overview.html +++ /dev/null @@ -1,46 +0,0 @@ -

-[deleted file contents: the docs-site overview page; its HTML markup was
- lost in extraction. Recoverable content: a "Getting started" section
- ("The google-cloud library is pip install-able: $ pip install
- google-cloud"), a "Cloud Datastore" section ("Google Cloud Datastore is a
- fully managed, schemaless database for storing non-relational data.")
- with a snippet that creates a datastore.Client, builds an Entity with
- name and age under a 'Person' key, and saves it with client.put(entity),
- and a "Cloud Storage" section ("Google Cloud Storage allows you to store
- data on Google infrastructure.") with a snippet that gets a bucket,
- creates blob('my-test-file.txt'), and calls upload_from_string('this is
- test content!').]
diff --git a/docs/json/json/master/toc.json b/docs/json/json/master/toc.json deleted file mode 100644 index 2292459a133d..000000000000 --- a/docs/json/json/master/toc.json +++ /dev/null @@ -1,451 +0,0 @@ -{ - "guides": [ - { - "contents": [ - "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/authentication/readme.md", - "authentication.md" - ], - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/authentication/readme.md", - "id": "authentication", - "title": "Authentication" - }, - { - "contents": "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/faq/readme.md", - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/faq/readme.md", - "id": "faq", - "title": "FAQ" - }, - { - "contents": "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/troubleshooting/readme.md", - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/troubleshooting/readme.md", - "id": "troubleshooting", - "title": "Troubleshooting" - }, - { - "contents": "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/contributing/readme.md", - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/contributing/readme.md", - "id": "contributing", - "title": "Contributing" - } - ], - "overview": "overview.html", - "services": [ - { - "nav": [ - { - "title": "Client", - "type": "google/cloud/client/client" - }, - { - "title": "Connection", - "type": "google/cloud/connection/connection" - }, - { - "title": "Credentials", - "type": "google/cloud/credentials" - }, - { - "title": "Environment Variables", - "type": "google/cloud/environment_vars" - }, - { - "title": "Iterator", - "type": "google/cloud/iterator" - } - ], - "title": "gcloud", - "type": "google/cloud/gcloud" - }, - { - "implemented": ">=0.7.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/bigquery/client/client" - }, - { - "title": "Dataset", - "type": "google/cloud/bigquery/dataset/dataset" - }, - { - "title": "Jobs", - "type": "google/cloud/bigquery/job" - }, - { - "title": "Table", - "type": "google/cloud/bigquery/table/table" - }, - { - "title": "Query", - "type": "google/cloud/bigquery/query/query" - } - ], - "title": "BigQuery", - "type": "google/cloud/bigquery" - }, - { - "implemented": ">=0.12.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/bigtable/client/client" - }, - { - "title": "Cluster", - "type": "google/cloud/bigtable/cluster/cluster" - }, - { - "title": "Column Families", - "type": "google/cloud/bigtable/column_family/columnfamily" - }, - { - "title": "Bigtable Row", - "type": "google/cloud/bigtable/row/row" - }, - { - "title": "Bigtable Row Filter", - "type": "google/cloud/bigtable/row_filters/rowfilter" - }, - { - "title": "Table", - "type": "google/cloud/bigtable/table/table" - } - ], - "title": "BigTable", - "type": "google/cloud/bigtable" - }, - { - "implemented": ">=0.3.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/datastore/client/client" - }, - { - "title": "Entity", - "type": "google/cloud/datastore/entity/entity" - }, - { - "title": "Key", - "type": "google/cloud/datastore/key/key" - }, - { - "title": "Query", - "type": "google/cloud/datastore/query/query" - }, - { - "title": "Transaction", - "type": "google/cloud/datastore/transaction/transaction" - }, - { - "title": "Batch", - "type": "google/cloud/datastore/batch/batch" - }, - { - "title": 
"Helpers", - "type": "datastore/helpers" - } - ], - "title": "Datastore", - "type": "google/cloud/datastore" - }, - { - "implemented": ">=0.8.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/dns/client/client" - }, - { - "title": "Managed Zones", - "type": "google/cloud/dns/zone/managedzone" - }, - { - "title": "Resource Record Set", - "type": "google/cloud/dns/resource_record_set/resourcerecordset" - }, - { - "title": "Change Sets", - "type": "google/cloud/dns/changes/changes" - } - ], - "title": "DNS", - "type": "google/cloud/dns" - }, - { - "nav": [ - { - "title": "Client", - "type": "google/cloud/error_reporting/client/client" - } - ], - "title": "Error Reporting", - "type": "google/cloud/error_reporting" - }, - { - "nav": [ - { - "title": "BadRequest", - "type": "google/cloud/exceptions/badrequest" - }, - { - "title": "ClientError", - "type": "google/cloud/exceptions/clienterror" - }, - { - "title": "Conflict", - "type": "google/cloud/exceptions/conflict" - }, - { - "title": "Forbidden", - "type": "google/cloud/exceptions/forbidden" - }, - { - "title": "GoogleCloudError", - "type": "google/cloud/exceptions/gclouderror" - }, - { - "title": "InternalServerError", - "type": "google/cloud/exceptions/internalservererror" - }, - { - "title": "LengthRequired", - "type": "google/cloud/exceptions/lengthrequired" - }, - { - "title": "MethodNotAllowed", - "type": "google/cloud/exceptions/methodnotallowed" - }, - { - "title": "MethodNotImplemented", - "type": "google/cloud/exceptions/methodnotimplemented" - }, - { - "title": "MovedPermanently", - "type": "google/cloud/exceptions/movedpermanently" - }, - { - "title": "NotFound", - "type": "google/cloud/exceptions/notfound" - }, - { - "title": "NotModified", - "type": "google/cloud/exceptions/notmodified" - }, - { - "title": "PreconditionFailed", - "type": "google/cloud/exceptions/preconditionfailed" - }, - { - "title": "Redirection", - "type": "google/cloud/exceptions/redirection" - }, - { - "title": "RequestRangeNotSatisfiable", - "type": "google/cloud/exceptions/requestrangenotsatisfiable" - }, - { - "title": "ResumeIncomplete", - "type": "google/cloud/exceptions/resumeincomplete" - }, - { - "title": "ServerError", - "type": "google/cloud/exceptions/servererror" - }, - { - "title": "ServiceUnavailable", - "type": "google/cloud/exceptions/serviceunavailable" - }, - { - "title": "TemporaryRedirect", - "type": "google/cloud/exceptions/temporaryredirect" - }, - { - "title": "TooManyRequests", - "type": "google/cloud/exceptions/toomanyrequests" - }, - { - "title": "Unauthorized", - "type": "google/cloud/exceptions/unauthorized" - }, - { - "title": "Exceptions", - "type": "google/cloud/exceptions" - } - ], - "title": "Exceptions", - "type": "google/cloud/exceptions" - }, - { - "implemented": ">=0.12.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/logging/client/client" - }, - { - "title": "Logger", - "type": "google/cloud/logging/logger/logger" - }, - { - "title": "Entries", - "type": "google/cloud/logging/entries" - }, - { - "title": "Metric", - "type": "google/cloud/logging/metric/metric" - }, - { - "title": "Sink", - "type": "google/cloud/logging/sink/sink" - } - ], - "title": "Cloud Logging", - "type": "google/cloud/logging" - }, - { - "implemented": ">=0.12.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/monitoring/client/client" - }, - { - "title": "Metric Descriptors", - "type": "google/cloud/monitoring/metric/metricdescriptor" - }, - { - "title": "Label Descriptors", - "type": 
"google/cloud/monitoring/label/labeldescriptor" - }, - { - "title": "Monitored Resource Descriptors", - "type": "google/cloud/monitoring/resource/resourcedescriptor" - }, - { - "title": "Time Series Query", - "type": "google/cloud/monitoring/query/query" - }, - { - "title": "Time Series", - "type": "googe/cloud/monitoring/timeseries/timeseries" - } - ], - "title": "Cloud Monitoring", - "type": "google/cloud/monitoring" - }, - { - "implemented": ">=0.5.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/pubsub/client/client" - }, - { - "title": "Topic", - "type": "google/cloud/pubsub/topic/topic" - }, - { - "title": "Subscription", - "type": "google/cloud/pubsub/subscription/subscription" - }, - { - "title": "Message", - "type": "google/cloud/pubsub/message/message" - }, - { - "title": "Policy", - "type": "google/cloud/pubsub/iam/policy" - } - ], - "title": "Pub/Sub", - "type": "google/cloud/pubsub" - }, - { - "implemented": ">=0.8.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/resource_manager/client/client" - }, - { - "title": "Project", - "type": "google/cloud/resource_manager/project/project" - } - ], - "title": "Resource Manager", - "type": "google/cloud/resource_manager" - }, - { - "implemented": ">=0.3.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/storage/client/client" - }, - { - "title": "Blob", - "type": "google/cloud/storage/blob/blob" - }, - { - "title": "Bucket", - "type": "google/cloud/storage/bucket/bucket" - }, - { - "title": "ACL", - "type": "google/cloud/storage/acl/acl" - }, - { - "title": "Batch", - "type": "google/cloud/storage/batch/batch" - } - ], - "title": "Storage", - "type": "google/cloud/storage" - }, - { - "nav": [ - { - "title": "Buffered Stream", - "type": "google/cloud/streaming/buffered_stream/bufferedstream" - }, - { - "title": "Request", - "type": "google/cloud/streaming/http_wrapper/request" - }, - { - "title": "Response", - "type": "google/cloud/streaming/http_wrapper/response" - }, - { - "title": "Stream Slice", - "type": "google/cloud/streaming/stream_slice/streamslice" - }, - { - "title": "Download", - "type": "google/cloud/streaming/transfer/download" - }, - { - "title": "Upload", - "type": "google/cloud/streaming/transfer/upload" - }, - { - "title": "Util", - "type": "google/cloud/streaming/util" - } - ], - "title": "Streaming", - "type": "google/cloud/streaming" - }, - { - "nav": [ - { - "title": "Connection", - "type": "google/cloud/vision/connection/connection" - } - ], - "title": "Vision", - "type": "google/cloud/vision" - } - ] -} diff --git a/docs/json/manifest.json b/docs/json/manifest.json deleted file mode 100644 index fa41098a95a2..000000000000 --- a/docs/json/manifest.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "content": "json", - "friendlyLang": "Python", - "home": "home.html", - "lang": "python", - "latestRelease": { - "date": 1455306471454, - "link": "https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master", - "name": "master" - }, - "markdown": "python", - "package": { - "href": "https://pypi.python.org/pypi/google-cloud", - "title": "PyPI" - }, - "titleDelimiter": ".", - "versions": [ - "master" - ] -} diff --git a/docs/language/client.rst b/docs/language/client.rst deleted file mode 100644 index 310e7b5bf828..000000000000 --- a/docs/language/client.rst +++ /dev/null @@ -1,6 +0,0 @@ -Natural Language Client -======================= - -.. 
automodule:: google.cloud.language.client - :members: - :show-inheritance: diff --git a/docs/language/document.rst b/docs/language/document.rst deleted file mode 100644 index e879b11e590a..000000000000 --- a/docs/language/document.rst +++ /dev/null @@ -1,6 +0,0 @@ -Document -~~~~~~~~ - -.. automodule:: google.cloud.language.document - :members: - :show-inheritance: diff --git a/docs/language/gapic/v1/api.rst b/docs/language/gapic/v1/api.rst new file mode 100644 index 000000000000..2c5fd4fd76ea --- /dev/null +++ b/docs/language/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Natural Language Client API +=========================== + +.. automodule:: google.cloud.language_v1 + :members: + :inherited-members: diff --git a/docs/language/gapic/v1/types.rst b/docs/language/gapic/v1/types.rst new file mode 100644 index 000000000000..90d27a4b96fb --- /dev/null +++ b/docs/language/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Natural Language Client Types +============================= + +.. automodule:: google.cloud.language_v1.types + :members: diff --git a/docs/language/gapic/v1beta2/api.rst b/docs/language/gapic/v1beta2/api.rst new file mode 100644 index 000000000000..330d7e6e7a78 --- /dev/null +++ b/docs/language/gapic/v1beta2/api.rst @@ -0,0 +1,6 @@ +Natural Language Beta Client API +================================ + +.. automodule:: google.cloud.language_v1beta2 + :members: + :inherited-members: diff --git a/docs/language/gapic/v1beta2/types.rst b/docs/language/gapic/v1beta2/types.rst new file mode 100644 index 000000000000..d9a7eb171f00 --- /dev/null +++ b/docs/language/gapic/v1beta2/types.rst @@ -0,0 +1,5 @@ +Natural Language Beta Client Types +================================== + +.. automodule:: google.cloud.language_v1beta2.types + :members: diff --git a/docs/language/responses.rst b/docs/language/responses.rst deleted file mode 100644 index 5584cbcdcfab..000000000000 --- a/docs/language/responses.rst +++ /dev/null @@ -1,37 +0,0 @@ -Natural Language Response Classes -================================= - -Responses -~~~~~~~~~ - -.. automodule:: google.cloud.language.api_responses - :members: - :show-inheritance: - -Sentences -~~~~~~~~~ - -.. automodule:: google.cloud.language.sentence - :members: - :show-inheritance: - -Entity -~~~~~~ - -.. automodule:: google.cloud.language.entity - :members: - :show-inheritance: - -Sentiment -~~~~~~~~~ - -.. automodule:: google.cloud.language.sentiment - :members: - :show-inheritance: - -Syntax -~~~~~~ - -.. automodule:: google.cloud.language.syntax - :members: - :show-inheritance: diff --git a/docs/language/usage.rst b/docs/language/usage.rst index 2a8c9ddba589..31d4bb20b95c 100644 --- a/docs/language/usage.rst +++ b/docs/language/usage.rst @@ -1,14 +1,6 @@ Natural Language ================ -.. toctree:: - :maxdepth: 2 - :hidden: - - client - document - responses - The `Google Natural Language`_ API can be used to reveal the structure and meaning of text via powerful machine learning models. You can use it to extract information about @@ -21,40 +13,43 @@ with your document storage on Google Cloud Storage. .. _Google Natural Language: https://cloud.google.com/natural-language/docs/getting-started -Client ------- -:class:`~google.cloud.language.client.Client` objects provide a -means to configure your application. Each instance holds -an authenticated connection to the Natural Language service. 
+******************************** +Authentication and Configuration +******************************** -For an overview of authentication in ``google-cloud-python``, see -:doc:`/core/auth`. +- For an overview of authentication in ``google-cloud-python``, + see :doc:`/core/auth`. -Assuming your environment is set up as described in that document, -create an instance of :class:`~google.cloud.language.client.Client`. +- In addition to any authentication configuration, you should also set the + :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd + like to interact with. If the :envvar:`GOOGLE_CLOUD_PROJECT` environment + variable is not present, the project ID from JSON file credentials is used. - .. code-block:: python + If you are using Google App Engine or Google Compute Engine + this will be detected automatically. - >>> from google.cloud import language - >>> client = language.Client() +- After configuring your environment, create a + :class:`~google.cloud.language_v1.LanguageServiceClient`. -By default the ``language`` is ``'en-US'`` and the ``encoding`` is -UTF-8. To over-ride these values: +.. code-block:: python - .. code-block:: python + >>> from google.cloud import language + >>> client = language.LanguageServiceClient() - >>> document = client.document_from_text( - ... text_content, language='es', encoding=language.Encoding.UTF16) +or pass in ``credentials`` explicitly. +.. code-block:: python + + >>> from google.cloud import language + >>> client = language.LanguageServiceClient( + ... credentials=creds, + ... ) -The encoding can be one of -:attr:`Encoding.UTF8 `, -:attr:`Encoding.UTF16 `, or -:attr:`Encoding.UTF32 `. -Methods -------- +********* +Documents +********* The Google Natural Language API has three supported methods @@ -62,109 +57,90 @@ The Google Natural Language API has three supported methods - `analyzeSentiment`_ - `annotateText`_ -and each method uses a `Document`_ for representing text. To -create a :class:`~google.cloud.language.document.Document`, +and each method uses a :class:`~.language_v1.types.Document` for representing +text. .. code-block:: python - >>> text_content = ( - ... 'Google, headquartered in Mountain View, unveiled the ' - ... 'new Android phone at the Consumer Electronic Show. ' - ... 'Sundar Pichai said in his keynote that users love ' - ... 'their new Android phones.') - >>> document = client.document_from_text(text_content) + >>> document = language.types.Document( + ... content='Google, headquartered in Mountain View, unveiled the ' + ... 'new Android phone at the Consumer Electronic Show. ' + ... 'Sundar Pichai said in his keynote that users love ' + ... 'their new Android phones.', + ... language='en', + ... type='PLAIN_TEXT', + ... ) -By using :meth:`~google.cloud.language.client.Client.document_from_text`, -the document's type is plain text: - - .. code-block:: python - - >>> document.doc_type == language.Document.PLAIN_TEXT - True The document's language defaults to ``None``, which will cause the API to auto-detect the language. -In addition, the -:meth:`~google.cloud.language.client.Client.document_from_html`, -factory can be used to created an HTML document. In this -method and the from text method, the language can be -over-ridden: +In addition, you can construct an HTML document: .. code-block:: python - >>> html_content = """\ - ... - ... - ... El Tiempo de las Historias</time> - ... </head> - ... <body> - ... <p>La vaca saltó sobre la luna.</p> - ... </body> - ... </html> - ... 
""" - >>> document = client.document_from_html(html_content, - ... language='es') + >>> html_content = """\ + ... <html> + ... <head> + ... <title>El Tiempo de las Historias</time> + ... </head> + ... <body> + ... <p>La vaca saltó sobre la luna.</p> + ... </body> + ... </html> + ... """ + >>> document = language.types.Document( + ... content=html_content, + ... language='es', + ... type='HTML', + ... ) The ``language`` argument can be either ISO-639-1 or BCP-47 language -codes; at the time, only English, Spanish, and Japanese `are supported`_. -However, the ``analyzeSentiment`` method `only supports`_ English text. +codes. The API reference page contains the full list of `supported languages`_. -.. _are supported: https://cloud.google.com/natural-language/docs/ -.. _only supports: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/analyzeSentiment#body.request_body.FIELDS.document +.. _supported languages: https://cloud.google.com/natural-language/docs/languages -The document type (``doc_type``) value can be one of -:attr:`Document.PLAIN_TEXT <google.cloud.language.document.Document.PLAIN_TEXT>` or -:attr:`Document.HTML <google.cloud.language.document.Document.HTML>`. In addition to supplying the text / HTML content, a document can refer -to content stored in `Google Cloud Storage`_. We can use the -:meth:`~google.cloud.language.client.Client.document_from_url` method: - - .. code-block:: python - - >>> gcs_url = 'gs://my-text-bucket/sentiment-me.txt' - >>> document = client.document_from_url( - ... gcs_url, doc_type=language.Document.HTML) - >>> document.gcs_url == gcs_url - True - >>> document.doc_type == language.Document.PLAIN_TEXT - True - -The document type can be specified with the ``doc_type`` argument: +to content stored in `Google Cloud Storage`_. .. code-block:: python - >>> document = client.document_from_url( - ... gcs_url, doc_type=language.Document.HTML) + >>> document = language.types.Document( + ... gcs_content_uri='gs://my-text-bucket/sentiment-me.txt', + ... type=language.enums.HTML, + ... ) .. _analyzeEntities: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/analyzeEntities .. _analyzeSentiment: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/analyzeSentiment .. _annotateText: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/annotateText -.. _Document: https://cloud.google.com/natural-language/reference/rest/v1beta1/Document .. _Google Cloud Storage: https://cloud.google.com/storage/ +**************** Analyze Entities ----------------- +**************** -The :meth:`~google.cloud.language.document.Document.analyze_entities` method -finds named entities (i.e. proper names) in the text and returns them -as a :class:`list` of :class:`~google.cloud.language.entity.Entity` objects. -Each entity has a corresponding type, salience (prominence), associated -metadata and other properties. +The :meth:`~.language_v1.LanguageServiceClient.analyze_entities` +method finds named entities (i.e. proper names) in the text. This method +returns a :class:`~.language_v1.types.AnalyzeEntitiesResponse`. .. code-block:: python - >>> text_content = ("Michelangelo Caravaggio, Italian painter, is " - ... "known for 'The Calling of Saint Matthew'.") - >>> document = client.document_from_text(text_content) - >>> entity_response = document.analyze_entities() - >>> for entity in entity_response.entities: + >>> document = language.types.Document( + ... 
content='Michelangelo Caravaggio, Italian painter, is ' + ... 'known for "The Calling of Saint Matthew".', + ... type=language.enums.Type.PLAIN_TEXT, + ... ) + >>> response = client.analyze_entities( + ... document=document, + ... encoding_type='UTF32', + ... ) + >>> for entity in response.entities: ... print('=' * 20) - ... print(' name: %s' % (entity.name,)) - ... print(' type: %s' % (entity.entity_type,)) - ... print(' metadata: %s' % (entity.metadata,)) - ... print(' salience: %s' % (entity.salience,)) + ... print(' name: {0}'.format(entity.name)) + ... print(' type: {0}'.format(entity.entity_type)) + ... print(' metadata: {0}'.format(entity.metadata)) + ... print(' salience: {0}'.format(entity.salience)) ==================== name: Michelangelo Caravaggio type: PERSON @@ -181,90 +157,84 @@ metadata and other properties. metadata: {'wikipedia_url': 'http://en.wikipedia.org/wiki/Caravaggio'} salience: 0.038798928 +.. note:: + + It is recommended to send an ``encoding_type`` argument to Natural + Language methods, so they provide useful offsets for the data they return. + While the correct value varies by environment, in Python you *usually* + want ``UTF32``. + + +***************** Analyze Sentiment ------------------ +***************** -The :meth:`~google.cloud.language.document.Document.analyze_sentiment` method -analyzes the sentiment of the provided text and returns a -:class:`~google.cloud.language.sentiment.Sentiment`. Currently, this method -only supports English text. +The :meth:`~.language_v1.LanguageServiceClient.analyze_sentiment` method +analyzes the sentiment of the provided text. This method returns a +:class:`~.language_v1.types.AnalyzeSentimentResponse`. .. code-block:: python - >>> text_content = "Jogging isn't very fun." - >>> document = client.document_from_text(text_content) - >>> sentiment_response = document.analyze_sentiment() - >>> sentiment = sentiment_response.sentiment + >>> document = language.types.Document( + ... content='Jogging is not very fun.', + ... type='PLAIN_TEXT', + ... ) + >>> response = client.analyze_sentiment( + ... document=document, + ... encoding_type='UTF32', + ... ) + >>> sentiment = response.document_sentiment >>> print(sentiment.score) -1 >>> print(sentiment.magnitude) 0.8 +.. note:: + + It is recommended to send an ``encoding_type`` argument to Natural + Language methods, so they provide useful offsets for the data they return. + While the correct value varies by environment, in Python you *usually* + want ``UTF32``. + + +************* Annotate Text -------------- +************* -The :meth:`~google.cloud.language.document.Document.annotate_text` method +The :meth:`~.language_v1.LanguageServiceClient.annotate_text` method analyzes a document and is intended for users who are familiar with -machine learning and need in-depth text features to build upon. - -The method returns a named tuple with four entries: - -* ``sentences``: A :class:`list` of sentences in the text -* ``tokens``: A :class:`list` of :class:`~google.cloud.language.syntax.Token` - object (e.g. 
words, punctuation) -* ``sentiment``: The :class:`~google.cloud.language.sentiment.Sentiment` of - the text (as returned by - :meth:`~google.cloud.language.document.Document.analyze_sentiment`) -* ``entities``: :class:`list` of :class:`~google.cloud.language.entity.Entity` - objects extracted from the text (as returned by - :meth:`~google.cloud.language.document.Document.analyze_entities`) - -By default :meth:`~google.cloud.language.document.Document.annotate_text` has -three arguments ``include_syntax``, ``include_entities`` and -``include_sentiment`` which are all :data:`True`. However, each of these -`Features`_ can be selectively turned off by setting the corresponding -arguments to :data:`False`. - -When ``include_syntax=False``, ``sentences`` and ``tokens`` in the -response is :data:`None`. When ``include_sentiment=False``, ``sentiment`` in -the response is :data:`None`. When ``include_entities=False``, ``entities`` in -the response is :data:`None`. +machine learning and need in-depth text features to build upon. This method +returns a :class:`~.language_v1.types.AnnotateTextResponse`. - .. code-block:: python - >>> text_content = 'The cow jumped over the Moon.' - >>> document = client.document_from_text(text_content) - >>> annotations = document.annotate_text() - >>> # Sentences present if include_syntax=True - >>> print(annotations.sentences) - ['The cow jumped over the Moon.'] - >>> # Tokens present if include_syntax=True - >>> for token in annotations.tokens: - ... msg = '%11s: %s' % (token.part_of_speech, token.text_content) - ... print(msg) - DETERMINER: The - NOUN: cow - VERB: jumped - ADPOSITION: over - DETERMINER: the - NOUN: Moon - PUNCTUATION: . - >>> # Sentiment present if include_sentiment=True - >>> print(annotations.sentiment.score) - 1 - >>> print(annotations.sentiment.magnitude) - 0.1 - >>> # Entities present if include_entities=True - >>> for entity in annotations.entities: - ... print('=' * 20) - ... print(' name: %s' % (entity.name,)) - ... print(' type: %s' % (entity.entity_type,)) - ... print(' metadata: %s' % (entity.metadata,)) - ... print(' salience: %s' % (entity.salience,)) - ==================== - name: Moon - type: LOCATION - metadata: {'wikipedia_url': 'http://en.wikipedia.org/wiki/Natural_satellite'} - salience: 0.11793101 +************* +API Reference +************* + +This package includes clients for multiple versions of the Natural Language +API. By default, you will get ``v1``, the latest GA version. + +.. toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types + +If you are interested in beta features ahead of the latest GA, you may +opt-in to the v1.1 beta, which is spelled ``v1beta2``. In order to do this, +you will want to import from ``google.cloud.language_v1beta2`` in lieu of +``google.cloud.language``. + +An API and type reference is provided for the v1.1 beta also: + +.. toctree:: + :maxdepth: 2 + + gapic/v1beta2/api + gapic/v1beta2/types + +.. note:: -.. _Features: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/annotateText#Features + The client for the beta API is provided on a provisional basis. The API + surface is subject to change, and it is possible that this client will be + deprecated or removed after its features become GA. diff --git a/docs/spanner/usage.rst b/docs/spanner/usage.rst index 0d9142041523..2d61fbaed9c7 100644 --- a/docs/spanner/usage.rst +++ b/docs/spanner/usage.rst @@ -40,5 +40,4 @@ In the hierarchy of API concepts .. _Cloud Spanner: https://cloud.google.com/spanner/docs/ .. 
_gRPC: http://www.grpc.io/ -.. _grpcio: https://pypi.python.org/pypi/grpcio - +.. _grpcio: https://pypi.org/project/grpcio/ diff --git a/docs/speech/alternative.rst b/docs/speech/alternative.rst deleted file mode 100644 index 7c287b8dfa44..000000000000 --- a/docs/speech/alternative.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Alternative -================== - -.. automodule:: google.cloud.speech.alternative - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/client.rst b/docs/speech/client.rst deleted file mode 100644 index 4e6caad90ff3..000000000000 --- a/docs/speech/client.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Client -============= - -.. automodule:: google.cloud.speech.client - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/encoding.rst b/docs/speech/encoding.rst deleted file mode 100644 index affe80a4ebd2..000000000000 --- a/docs/speech/encoding.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Encoding -=============== - -.. automodule:: google.cloud.speech.encoding - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/gapic/api.rst b/docs/speech/gapic/api.rst new file mode 100644 index 000000000000..ded919fcbdcd --- /dev/null +++ b/docs/speech/gapic/api.rst @@ -0,0 +1,6 @@ +Speech Client API +================= + +.. automodule:: google.cloud.speech_v1 + :members: + :inherited-members: diff --git a/docs/speech/gapic/types.rst b/docs/speech/gapic/types.rst new file mode 100644 index 000000000000..0ddf83d3bb60 --- /dev/null +++ b/docs/speech/gapic/types.rst @@ -0,0 +1,5 @@ +Speech Client Types +=================== + +.. automodule:: google.cloud.speech_v1.types + :members: diff --git a/docs/speech/usage.rst b/docs/speech/index.rst similarity index 50% rename from docs/speech/usage.rst rename to docs/speech/index.rst index a651965e9e18..9373e830cff3 100644 --- a/docs/speech/usage.rst +++ b/docs/speech/index.rst @@ -1,16 +1,6 @@ +###### Speech -====== - -.. toctree:: - :maxdepth: 2 - :hidden: - - client - encoding - operation - result - sample - alternative +###### The `Google Speech`_ API enables developers to convert audio to text. The API recognizes over 80 languages and variants, to support your global user @@ -18,10 +8,11 @@ base. .. _Google Speech: https://cloud.google.com/speech/docs/getting-started -Client ------- -:class:`~google.cloud.speech.client.Client` objects provide a +Authentication and Configuration +-------------------------------- + +:class:`~google.cloud.speech_v1.SpeechClient` objects provide a means to configure your application. Each instance holds an authenticated connection to the Cloud Speech Service. @@ -29,21 +20,22 @@ For an overview of authentication in ``google-cloud-python``, see :doc:`/core/auth`. Assuming your environment is set up as described in that document, -create an instance of :class:`~google.cloud.speech.client.Client`. +create an instance of :class:`~.speech_v1.SpeechClient`. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() + >>> client = speech.SpeechClient() Asynchronous Recognition ------------------------ -The :meth:`~google.cloud.speech.Client.long_running_recognize` sends audio -data to the Speech API and initiates a Long Running Operation. Using this -operation, you can periodically poll for recognition results. Use asynchronous -requests for audio data of any duration up to 80 minutes. +The :meth:`~.speech_v1.SpeechClient.long_running_recognize` method +sends audio data to the Speech API and initiates a Long Running Operation. 
+ +Using this operation, you can periodically poll for recognition results. +Use asynchronous requests for audio data of any duration up to 80 minutes. See: `Speech Asynchronous Recognize`_ @@ -52,13 +44,16 @@ See: `Speech Asynchronous Recognize`_ >>> import time >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.LINEAR16, - ... sample_rate_hertz=44100) - >>> operation = sample.long_running_recognize( - ... language_code='en-US', - ... max_alternatives=2, + >>> client = speech.SpeechClient() + >>> operation = client.long_running_recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='LINEAR16', + ... language_code='en-US', + ... sample_rate_hertz=44100, + ... ), ... ) >>> retry_count = 100 >>> while retry_count > 0 and not operation.complete: @@ -80,7 +75,7 @@ See: `Speech Asynchronous Recognize`_ Synchronous Recognition ----------------------- -The :meth:`~google.cloud.speech.Client.recognize` method converts speech +The :meth:`~.speech_v1.SpeechClient.recognize` method converts speech data to text and returns alternative text transcriptions. This example uses ``language_code='en-GB'`` to better recognize a dialect from @@ -89,12 +84,17 @@ Great Britain. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.FLAC, - ... sample_rate_hertz=44100) - >>> results = sample.recognize( - ... language_code='en-GB', max_alternatives=2) + >>> client = speech.SpeechClient() + >>> results = client.recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='LINEAR16', + ... language_code='en-US', + ... sample_rate_hertz=44100, + ... ), + ... ) >>> for result in results: ... for alternative in result.alternatives: ... print('=' * 20) @@ -112,14 +112,17 @@ Example of using the profanity filter. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.FLAC, - ... sample_rate_hertz=44100) - >>> results = sample.recognize( - ... language_code='en-US', - ... max_alternatives=1, - ... profanity_filter=True, + >>> client = speech.SpeechClient() + >>> results = client.recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='LINEAR16', + ... language_code='en-US', + ... profanity_filter=True, + ... sample_rate_hertz=44100, + ... ), ... ) >>> for result in results: ... for alternative in result.alternatives: @@ -137,15 +140,20 @@ words to the vocabulary of the recognizer. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.FLAC, - ... sample_rate_hertz=44100) - >>> hints = ['hi', 'good afternoon'] - >>> results = sample.recognize( - ... language_code='en-US', - ... max_alternatives=2, - ... speech_contexts=hints, + >>> from google.cloud import speech + >>> client = speech.SpeechClient() + >>> results = client.recognize( + ... 
@@ -112,14 +112,17 @@ Example of using the profanity filter.
 
 .. code-block:: python
 
     >>> from google.cloud import speech
-    >>> client = speech.Client()
-    >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac',
-    ...                        encoding=speech.Encoding.FLAC,
-    ...                        sample_rate_hertz=44100)
-    >>> results = sample.recognize(
-    ...     language_code='en-US',
-    ...     max_alternatives=1,
-    ...     profanity_filter=True,
+    >>> client = speech.SpeechClient()
+    >>> results = client.recognize(
+    ...     audio=speech.types.RecognitionAudio(
+    ...         uri='gs://my-bucket/recording.flac',
+    ...     ),
+    ...     config=speech.types.RecognitionConfig(
+    ...         encoding='FLAC',
+    ...         language_code='en-US',
+    ...         profanity_filter=True,
+    ...         sample_rate_hertz=44100,
+    ...     ),
     ... )
     >>> for result in results:
     ...     for alternative in result.alternatives:
@@ -137,15 +140,20 @@ words to the vocabulary of the recognizer.
 
 .. code-block:: python
 
     >>> from google.cloud import speech
-    >>> client = speech.Client()
-    >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac',
-    ...                        encoding=speech.Encoding.FLAC,
-    ...                        sample_rate_hertz=44100)
-    >>> hints = ['hi', 'good afternoon']
-    >>> results = sample.recognize(
-    ...     language_code='en-US',
-    ...     max_alternatives=2,
-    ...     speech_contexts=hints,
+    >>> client = speech.SpeechClient()
+    >>> results = client.recognize(
+    ...     audio=speech.types.RecognitionAudio(
+    ...         uri='gs://my-bucket/recording.flac',
+    ...     ),
+    ...     config=speech.types.RecognitionConfig(
+    ...         encoding='FLAC',
+    ...         language_code='en-US',
+    ...         sample_rate_hertz=44100,
+    ...         speech_contexts=[speech.types.SpeechContext(
+    ...             phrases=['hi', 'good afternoon'],
+    ...         )],
+    ...     ),
     ... )
     >>> for result in results:
     ...     for alternative in result.alternatives:
@@ -160,7 +168,7 @@ words to the vocabulary of the recognizer.
 Streaming Recognition
 ---------------------
 
-The :meth:`~google.cloud.speech.Client.streaming_recognize` method converts
+The :meth:`~.speech_v1.SpeechClient.streaming_recognize` method converts
 speech data to possible text alternatives on the fly.
 
 .. note::
@@ -170,18 +178,27 @@ speech data to possible text alternatives on the fly.
 
 .. code-block:: python
 
+    >>> import io
     >>> from google.cloud import speech
-    >>> client = speech.Client()
-    >>> with open('./hello.wav', 'rb') as stream:
-    ...     sample = client.sample(stream=stream,
-    ...                            encoding=speech.Encoding.LINEAR16,
-    ...                            sample_rate_hertz=16000)
-    ...     results = sample.streaming_recognize(language_code='en-US')
-    ...     for result in results:
-    ...         for alternative in result.alternatives:
-    ...             print('=' * 20)
-    ...             print('transcript: ' + alternative.transcript)
-    ...             print('confidence: ' + str(alternative.confidence))
+    >>> client = speech.SpeechClient()
+    >>> config = speech.types.RecognitionConfig(
+    ...     encoding='LINEAR16',
+    ...     language_code='en-US',
+    ...     sample_rate_hertz=44100,
+    ... )
+    >>> with io.open('./hello.wav', 'rb') as stream:
+    ...     requests = [speech.types.StreamingRecognizeRequest(
+    ...         audio_content=stream.read(),
+    ...     )]
+    >>> results = client.streaming_recognize(
+    ...     config=speech.types.StreamingRecognitionConfig(config=config),
+    ...     requests=requests,
+    ... )
+    >>> for result in results:
+    ...     for alternative in result.alternatives:
+    ...         print('=' * 20)
+    ...         print('transcript: ' + alternative.transcript)
+    ...         print('confidence: ' + str(alternative.confidence))
     ====================
     transcript: hello thank you for using Google Cloud platform
     confidence: 0.927983105183
@@ -193,20 +210,36 @@ until the client closes the output stream or until the maximum time limit
 has been reached.
 
 If you only want to recognize a single utterance you can set
-    ``single_utterance`` to :data:`True` and only one result will be returned.
+``single_utterance`` to :data:`True` and only one result will be returned.
 
 See: `Single Utterance`_
 
 .. code-block:: python
 
-    >>> with open('./hello_pause_goodbye.wav', 'rb') as stream:
-    ...     sample = client.sample(stream=stream,
-    ...                            encoding=speech.Encoding.LINEAR16,
-    ...                            sample_rate_hertz=16000)
-    ...     results = sample.streaming_recognize(
-    ...         language_code='en-US',
-    ...         single_utterance=True,
-    ...     )
+    >>> import io
+    >>> from google.cloud import speech
+    >>> client = speech.SpeechClient()
+    >>> config = speech.types.RecognitionConfig(
+    ...     encoding='LINEAR16',
+    ...     language_code='en-US',
+    ...     sample_rate_hertz=44100,
+    ... )
+    >>> with io.open('./hello-pause-goodbye.wav', 'rb') as stream:
+    ...     requests = [speech.types.StreamingRecognizeRequest(
+    ...         audio_content=stream.read(),
+    ...     )]
+    >>> results = client.streaming_recognize(
+    ...     config=speech.types.StreamingRecognitionConfig(
+    ...         config=config,
+    ...         single_utterance=True,
+    ...     ),
+    ...     requests=requests,
+    ... )
+    >>> for result in results:
+    ...     for alternative in result.alternatives:
+    ...         print('=' * 20)
+    ...         print('transcript: ' + alternative.transcript)
+    ...         print('confidence: ' + str(alternative.confidence))
-    ...     for result in results:
-    ...         for alternative in result.alternatives:
-    ...             print('=' * 20)
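The streaming examples above buffer the whole file into a single ``StreamingRecognizeRequest``; for long or live audio you would typically yield smaller chunks instead. A minimal sketch of that pattern (the 32000-byte chunk size is an arbitrary choice, not an API requirement):

.. code-block:: python

    >>> import io
    >>> from google.cloud import speech
    >>> client = speech.SpeechClient()
    >>> def request_stream(path, chunk_size=32000):
    ...     # Yield the audio in fixed-size chunks rather than one request.
    ...     with io.open(path, 'rb') as stream:
    ...         data = stream.read(chunk_size)
    ...         while data:
    ...             yield speech.types.StreamingRecognizeRequest(
    ...                 audio_content=data,
    ...             )
    ...             data = stream.read(chunk_size)
    >>> results = client.streaming_recognize(
    ...     config=speech.types.StreamingRecognitionConfig(
    ...         config=speech.types.RecognitionConfig(
    ...             encoding='LINEAR16',
    ...             language_code='en-US',
    ...             sample_rate_hertz=44100,
    ...         ),
    ...     ),
    ...     requests=request_stream('./hello.wav'),
    ... )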
@@ -221,22 +254,31 @@ If ``interim_results`` is set to :data:`True`, interim results
 
 .. code-block:: python
 
+    >>> import io
     >>> from google.cloud import speech
-    >>> client = speech.Client()
-    >>> with open('./hello.wav', 'rb') as stream:
-    ...     sample = client.sample(stream=stream,
-    ...                            encoding=speech.Encoding.LINEAR16,
-    ...                            sample_rate=16000)
-    ...     results = sample.streaming_recognize(
-    ...         interim_results=True,
-    ...         language_code='en-US',
-    ...     )
-    ...     for result in results:
-    ...         for alternative in result.alternatives:
-    ...             print('=' * 20)
-    ...             print('transcript: ' + alternative.transcript)
-    ...             print('confidence: ' + str(alternative.confidence))
-    ...             print('is_final:' + str(result.is_final))
+    >>> client = speech.SpeechClient()
+    >>> config = speech.types.RecognitionConfig(
+    ...     encoding='LINEAR16',
+    ...     language_code='en-US',
+    ...     sample_rate_hertz=44100,
+    ... )
+    >>> with io.open('./hello.wav', 'rb') as stream:
+    ...     requests = [speech.types.StreamingRecognizeRequest(
+    ...         audio_content=stream.read(),
+    ...     )]
+    >>> results = client.streaming_recognize(
+    ...     config=speech.types.StreamingRecognitionConfig(
+    ...         config=config,
+    ...         interim_results=True,
+    ...     ),
+    ...     requests=requests,
+    ... )
+    >>> for result in results:
+    ...     for alternative in result.alternatives:
+    ...         print('=' * 20)
+    ...         print('transcript: ' + alternative.transcript)
+    ...         print('confidence: ' + str(alternative.confidence))
+    ...         print('is_final:' + str(result.is_final))
     ====================
     'he'
     None
@@ -254,3 +296,13 @@
 .. _Single Utterance: https://cloud.google.com/speech/reference/rpc/google.cloud.speech.v1beta1#streamingrecognitionconfig
 .. _sync_recognize: https://cloud.google.com/speech/reference/rest/v1beta1/speech/syncrecognize
 .. _Speech Asynchronous Recognize: https://cloud.google.com/speech/reference/rest/v1beta1/speech/asyncrecognize
+
+
+API Reference
+-------------
+
+.. toctree::
+    :maxdepth: 2
+
+    gapic/api
+    gapic/types
diff --git a/docs/speech/operation.rst b/docs/speech/operation.rst
deleted file mode 100644
index 5c0ec3b92b12..000000000000
--- a/docs/speech/operation.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Speech Operation
-================
-
-.. automodule:: google.cloud.speech.operation
-    :members:
-    :undoc-members:
-    :show-inheritance:
diff --git a/docs/speech/result.rst b/docs/speech/result.rst
deleted file mode 100644
index d4759b704199..000000000000
--- a/docs/speech/result.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Speech Result
-=============
-
-.. automodule:: google.cloud.speech.result
-    :members:
-    :undoc-members:
-    :show-inheritance:
diff --git a/docs/speech/sample.rst b/docs/speech/sample.rst
deleted file mode 100644
index f0b4098ba4ca..000000000000
--- a/docs/speech/sample.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Speech Sample
-=============
-
-.. automodule:: google.cloud.speech.sample
-    :members:
-    :undoc-members:
-    :show-inheritance:
diff --git a/docs/trace/apis.rst b/docs/trace/apis.rst
new file mode 100644
index 000000000000..80a8d50c0c60
--- /dev/null
+++ b/docs/trace/apis.rst
@@ -0,0 +1,19 @@
+API Reference
+=============
+
+APIs
+----
+
+.. autosummary::
+    :toctree:
+
+    google.cloud.gapic.trace.v1.trace_service_client
+
+
+API types
+~~~~~~~~~
+
+.. 
autosummary:: + :toctree: + + google.cloud.gapic.trace.v1.enums diff --git a/docs/trace/conf.py b/docs/trace/conf.py new file mode 100644 index 000000000000..5eead079b01c --- /dev/null +++ b/docs/trace/conf.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-trace documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.15.4' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-trace' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. 
They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-trace-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-trace.tex', + u'google-cloud-trace Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-trace', + u'google-cloud-trace Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-trace', + u'google-cloud-trace Documentation', author, + 'google-cloud-trace', + 'GAPIC library for the {metadata.shortName} v1 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = {
+    'python': ('http://python.readthedocs.org/en/latest/', None),
+    'gax': ('https://gax-python.readthedocs.org/en/latest/', None),
+}
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/docs/trace/index.rst b/docs/trace/index.rst
new file mode 100644
index 000000000000..08044709bcc6
--- /dev/null
+++ b/docs/trace/index.rst
@@ -0,0 +1,41 @@
+.. gapic-google-cloud-trace-v1 sphinx documentation master file
+
+
+GAPIC library for the Stackdriver Trace API
+===========================================
+
+This is the API documentation for ``gapic-google-cloud-trace-v1``.
+
+gapic-google-cloud-trace-v1 uses google-gax_ (Google API extensions) to provide an
+easy-to-use client library for the `Stackdriver Trace API`_ (v1) defined in the googleapis_ git repository.
+
+
+.. _`google-gax`: https://github.com/googleapis/gax-python
+.. _`googleapis`: https://github.com/googleapis/googleapis/tree/master/google/devtools/cloudtrace/v1
+.. _`Stackdriver Trace API`: https://developers.google.com/apis-explorer/?hl=en_US#p/cloudtrace/v1/
+
+
+APIs
+----
+
+.. autosummary::
+
+    google.cloud.gapic.trace.v1.trace_service_client
+
+
+Contents
+--------
+
+.. toctree::
+
+    self
+    starting
+    apis
+
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/docs/trace/starting.rst b/docs/trace/starting.rst
new file mode 100644
index 000000000000..245fcfd68a87
--- /dev/null
+++ b/docs/trace/starting.rst
@@ -0,0 +1,78 @@
+Getting started
+===============
+
+gapic-google-cloud-trace-v1 allows you to connect to the `Stackdriver Trace API`_ and access all its methods. In order to achieve this, you need to set up authentication as well as install the library locally.
+
+.. _`Stackdriver Trace API`: https://developers.google.com/apis-explorer/?hl=en_US#p/cloudtrace/v1/
+
+
+Installation
+------------
+
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+~~~~~~~~~
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install gapic-google-cloud-trace-v1
+
+Windows
+~~~~~~~
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install gapic-google-cloud-trace-v1
+
+
+Using the API
+-------------
+
+
+Authentication
+~~~~~~~~~~~~~~
+
+To authenticate all your API calls, first install and set up the `Google Cloud SDK`_.
+Once done, you can then run the following command in your terminal:
+
+.. code-block:: console
+
+    $ gcloud beta auth application-default login
+
+or
+
+.. code-block:: console
+
+    $ gcloud auth login
+
+Please see the `gcloud beta auth application-default login`_ documentation for the difference between these commands.
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
+.. _gcloud beta auth application-default login: https://cloud.google.com/sdk/gcloud/reference/beta/auth/application-default/login
+
+At this point you are all set to continue.
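Once application-default credentials are in place, client construction needs no explicit credentials argument. A minimal sketch, assuming the generated class follows the usual GAPIC naming (``TraceServiceClient``):

.. code-block:: python

    from google.cloud.gapic.trace.v1 import trace_service_client

    # Credentials are discovered automatically from the environment
    # configured by ``gcloud beta auth application-default login``.
    client = trace_service_client.TraceServiceClient()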
+
+
+Examples
+~~~~~~~~
+
+To see example usage, please read through the :doc:`API reference </apis>`. The
+documentation for each API method includes simple examples.
diff --git a/docs/vision/gapic/api.rst b/docs/vision/gapic/v1/api.rst
similarity index 100%
rename from docs/vision/gapic/api.rst
rename to docs/vision/gapic/v1/api.rst
diff --git a/docs/vision/gapic/types.rst b/docs/vision/gapic/v1/types.rst
similarity index 100%
rename from docs/vision/gapic/types.rst
rename to docs/vision/gapic/v1/types.rst
diff --git a/docs/vision/index.rst b/docs/vision/index.rst
index 49f90d502d46..c69240f792bd 100644
--- a/docs/vision/index.rst
+++ b/docs/vision/index.rst
@@ -33,19 +33,21 @@ Authentication and Configuration
   this will be detected automatically.
 
 - After configuring your environment, create a
-  :class:`~google.cloud.vision.client.Client`.
+  :class:`~google.cloud.vision_v1.ImageAnnotatorClient`.
 
 .. code-block:: python
 
     >>> from google.cloud import vision
     >>> client = vision.ImageAnnotatorClient()
 
-or pass in ``credentials`` and ``project`` explicitly.
+or pass in ``credentials`` explicitly.
 
 .. code-block:: python
 
     >>> from google.cloud import vision
-    >>> client = vision.Client(project='my-project', credentials=creds)
+    >>> client = vision.ImageAnnotatorClient(
+    ...     credentials=creds,
+    ... )
@@ -91,7 +93,7 @@ for it using our direct methods:
 
     ...     })
     >>> len(response.annotations)
     1
-    >>> for face in resposne.annotations[0].faces:
+    >>> for face in response.annotations[0].faces:
    ...     print(face.joy)
     Likelihood.VERY_LIKELY
     Likelihood.VERY_LIKELY
@@ -103,7 +105,7 @@ No results found
 ****************
 
 If no results for the detection performed can be extracted from the image, then
-an empty list is returned. This behavior is similiar with all detection types.
+an empty list is returned. This behavior is similar for all detection types.
 
 Example with :meth:`~google.cloud.vision.ImageAnnotatorClient.logo_detection`:
 
@@ -127,5 +129,5 @@ API Reference
 .. toctree::
     :maxdepth: 2
 
-    gapic/api
-    gapic/types
+    gapic/v1/api
+    gapic/v1/types
diff --git a/error_reporting/MANIFEST.in b/error_reporting/MANIFEST.in
index 9f7100c9528a..fc77f8c82ff0 100644
--- a/error_reporting/MANIFEST.in
+++ b/error_reporting/MANIFEST.in
@@ -1,4 +1,4 @@
 include README.rst LICENSE
 recursive-include google *.json *.proto
-recursive-include unit_tests *
+recursive-include tests *
 global-exclude *.pyc __pycache__
diff --git a/error_reporting/README.rst b/error_reporting/README.rst
index e968d42754bf..104856f348e6 100644
--- a/error_reporting/README.rst
+++ b/error_reporting/README.rst
@@ -9,7 +9,7 @@ Python Client for Stackdriver Error Reporting
 
 -  `Documentation`_
 
-.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/error-reporting-usage.html
+.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/error-reporting/usage.html
 
 Quick Start
 -----------
@@ -26,7 +26,7 @@
 possible. Check out the `Authentication section`_ in our documentation to learn more.
You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -47,6 +47,6 @@ See the ``google-cloud-python`` API Error Reporting `Documentation`_ to learn how to get started using this library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-error-reporting.svg - :target: https://pypi.python.org/pypi/google-cloud-error-reporting + :target: https://pypi.org/project/google-cloud-error-reporting/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-error-reporting.svg - :target: https://pypi.python.org/pypi/google-cloud-error-reporting + :target: https://pypi.org/project/google-cloud-error-reporting/ diff --git a/error_reporting/nox.py b/error_reporting/nox.py index 1deed376b6e7..d2e2a7cb9609 100644 --- a/error_reporting/nox.py +++ b/error_reporting/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -44,21 +47,32 @@ def unit_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/error_reporting') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') @@ -71,11 +85,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/error_reporting/pylint.config.py b/error_reporting/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/error_reporting/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/error_reporting/setup.py b/error_reporting/setup.py index 2cf6ef8a9cac..807af3b97907 100644 --- a/error_reporting/setup.py +++ b/error_reporting/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.1, < 0.25dev', - 'google-cloud-logging >= 1.0.0, < 2.0dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-cloud-logging >= 1.1.0, < 1.2dev', 'gapic-google-cloud-error-reporting-v1beta1 >= 0.15.0, < 0.16dev' ] setup( name='google-cloud-error-reporting', - version='0.24.2', + version='0.25.1', description='Python Client for Stackdriver Error Reporting', long_description=README, namespace_packages=[ diff --git a/language/MANIFEST.in b/language/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/language/MANIFEST.in +++ b/language/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/language/README.rst b/language/README.rst index 5d09b6de0dc6..9940503a7832 100644 --- a/language/README.rst +++ b/language/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Natural Language - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/language-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/language/usage.html Quick Start ----------- @@ -28,7 +28,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -46,6 +46,6 @@ See the ``google-cloud-python`` API Natural Language `Documentation`_ to learn how to analyze text with this API. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-language.svg - :target: https://pypi.python.org/pypi/google-cloud-language + :target: https://pypi.org/project/google-cloud-language/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-language.svg - :target: https://pypi.python.org/pypi/google-cloud-language + :target: https://pypi.org/project/google-cloud-language/ diff --git a/language/google/cloud/gapic/__init__.py b/language/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/gapic/language/__init__.py b/language/google/cloud/gapic/language/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/gapic/language/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/gapic/language/v1/__init__.py b/language/google/cloud/gapic/language/v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/language/google/cloud/gapic/language/v1/enums.py b/language/google/cloud/gapic/language/v1/enums.py new file mode 100644 index 000000000000..2b53e4d913bb --- /dev/null +++ b/language/google/cloud/gapic/language/v1/enums.py @@ -0,0 +1,516 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class EncodingType(object): + """ + Represents the text encoding that the caller uses to process the output. + Providing an ``EncodingType`` is recommended because the API provides the + beginning offsets for various outputs, such as tokens and mentions, and + languages that natively use different text encodings may access offsets + differently. + + Attributes: + NONE (int): If ``EncodingType`` is not specified, encoding-dependent information (such as + ``begin_offset``) will be set at ``-1``. + UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-8 encoding of the input. C++ and Go are examples of languages + that use this encoding natively. + UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-16 encoding of the input. Java and Javascript are examples of + languages that use this encoding natively. + UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-32 encoding of the input. Python is an example of a language + that uses this encoding natively. + """ + NONE = 0 + UTF8 = 1 + UTF16 = 2 + UTF32 = 3 + + +class Document(object): + class Type(object): + """ + The document types enum. + + Attributes: + TYPE_UNSPECIFIED (int): The content type is not specified. + PLAIN_TEXT (int): Plain text + HTML (int): HTML + """ + TYPE_UNSPECIFIED = 0 + PLAIN_TEXT = 1 + HTML = 2 + + +class Entity(object): + class Type(object): + """ + The type of the entity. 
+ + Attributes: + UNKNOWN (int): Unknown + PERSON (int): Person + LOCATION (int): Location + ORGANIZATION (int): Organization + EVENT (int): Event + WORK_OF_ART (int): Work of art + CONSUMER_GOOD (int): Consumer goods + OTHER (int): Other types + """ + UNKNOWN = 0 + PERSON = 1 + LOCATION = 2 + ORGANIZATION = 3 + EVENT = 4 + WORK_OF_ART = 5 + CONSUMER_GOOD = 6 + OTHER = 7 + + +class PartOfSpeech(object): + class Tag(object): + """ + The part of speech tags enum. + + Attributes: + UNKNOWN (int): Unknown + ADJ (int): Adjective + ADP (int): Adposition (preposition and postposition) + ADV (int): Adverb + CONJ (int): Conjunction + DET (int): Determiner + NOUN (int): Noun (common and proper) + NUM (int): Cardinal number + PRON (int): Pronoun + PRT (int): Particle or other function word + PUNCT (int): Punctuation + VERB (int): Verb (all tenses and modes) + X (int): Other: foreign words, typos, abbreviations + AFFIX (int): Affix + """ + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(object): + """ + The characteristic of a verb that expresses time flow during an event. + + Attributes: + ASPECT_UNKNOWN (int): Aspect is not applicable in the analyzed language or is not predicted. + PERFECTIVE (int): Perfective + IMPERFECTIVE (int): Imperfective + PROGRESSIVE (int): Progressive + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(object): + """ + The grammatical function performed by a noun or pronoun in a phrase, + clause, or sentence. In some languages, other parts of speech, such as + adjective and determiner, take case inflection in agreement with the noun. + + Attributes: + CASE_UNKNOWN (int): Case is not applicable in the analyzed language or is not predicted. + ACCUSATIVE (int): Accusative + ADVERBIAL (int): Adverbial + COMPLEMENTIVE (int): Complementive + DATIVE (int): Dative + GENITIVE (int): Genitive + INSTRUMENTAL (int): Instrumental + LOCATIVE (int): Locative + NOMINATIVE (int): Nominative + OBLIQUE (int): Oblique + PARTITIVE (int): Partitive + PREPOSITIONAL (int): Prepositional + REFLEXIVE_CASE (int): Reflexive + RELATIVE_CASE (int): Relative + VOCATIVE (int): Vocative + """ + CASE_UNKNOWN = 0 + ACCUSATIVE = 1 + ADVERBIAL = 2 + COMPLEMENTIVE = 3 + DATIVE = 4 + GENITIVE = 5 + INSTRUMENTAL = 6 + LOCATIVE = 7 + NOMINATIVE = 8 + OBLIQUE = 9 + PARTITIVE = 10 + PREPOSITIONAL = 11 + REFLEXIVE_CASE = 12 + RELATIVE_CASE = 13 + VOCATIVE = 14 + + class Form(object): + """ + Depending on the language, Form can be categorizing different forms of + verbs, adjectives, adverbs, etc. For example, categorizing inflected + endings of verbs and adjectives or distinguishing between short and long + forms of adjectives and participles + + Attributes: + FORM_UNKNOWN (int): Form is not applicable in the analyzed language or is not predicted. + ADNOMIAL (int): Adnomial + AUXILIARY (int): Auxiliary + COMPLEMENTIZER (int): Complementizer + FINAL_ENDING (int): Final ending + GERUND (int): Gerund + REALIS (int): Realis + IRREALIS (int): Irrealis + SHORT (int): Short form + LONG (int): Long form + ORDER (int): Order form + SPECIFIC (int): Specific form + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(object): + """ + Gender classes of nouns reflected in the behaviour of associated words. 
+ + Attributes: + GENDER_UNKNOWN (int): Gender is not applicable in the analyzed language or is not predicted. + FEMININE (int): Feminine + MASCULINE (int): Masculine + NEUTER (int): Neuter + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(object): + """ + The grammatical feature of verbs, used for showing modality and attitude. + + Attributes: + MOOD_UNKNOWN (int): Mood is not applicable in the analyzed language or is not predicted. + CONDITIONAL_MOOD (int): Conditional + IMPERATIVE (int): Imperative + INDICATIVE (int): Indicative + INTERROGATIVE (int): Interrogative + JUSSIVE (int): Jussive + SUBJUNCTIVE (int): Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(object): + """ + Count distinctions. + + Attributes: + NUMBER_UNKNOWN (int): Number is not applicable in the analyzed language or is not predicted. + SINGULAR (int): Singular + PLURAL (int): Plural + DUAL (int): Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(object): + """ + The distinction between the speaker, second person, third person, etc. + + Attributes: + PERSON_UNKNOWN (int): Person is not applicable in the analyzed language or is not predicted. + FIRST (int): First + SECOND (int): Second + THIRD (int): Third + REFLEXIVE_PERSON (int): Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(object): + """ + This category shows if the token is part of a proper name. + + Attributes: + PROPER_UNKNOWN (int): Proper is not applicable in the analyzed language or is not predicted. + PROPER (int): Proper + NOT_PROPER (int): Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(object): + """ + Reciprocal features of a pronoun. + + Attributes: + RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not + predicted. + RECIPROCAL (int): Reciprocal + NON_RECIPROCAL (int): Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(object): + """ + Time reference. + + Attributes: + TENSE_UNKNOWN (int): Tense is not applicable in the analyzed language or is not predicted. + CONDITIONAL_TENSE (int): Conditional + FUTURE (int): Future + PAST (int): Past + PRESENT (int): Present + IMPERFECT (int): Imperfect + PLUPERFECT (int): Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(object): + """ + The relationship between the action that a verb expresses and the + participants identified by its arguments. + + Attributes: + VOICE_UNKNOWN (int): Voice is not applicable in the analyzed language or is not predicted. + ACTIVE (int): Active + CAUSATIVE (int): Causative + PASSIVE (int): Passive + """ + VOICE_UNKNOWN = 0 + ACTIVE = 1 + CAUSATIVE = 2 + PASSIVE = 3 + + +class DependencyEdge(object): + class Label(object): + """ + The parse label enum for the token. 
+ + Attributes: + UNKNOWN (int): Unknown + ABBREV (int): Abbreviation modifier + ACOMP (int): Adjectival complement + ADVCL (int): Adverbial clause modifier + ADVMOD (int): Adverbial modifier + AMOD (int): Adjectival modifier of an NP + APPOS (int): Appositional modifier of an NP + ATTR (int): Attribute dependent of a copular verb + AUX (int): Auxiliary (non-main) verb + AUXPASS (int): Passive auxiliary + CC (int): Coordinating conjunction + CCOMP (int): Clausal complement of a verb or adjective + CONJ (int): Conjunct + CSUBJ (int): Clausal subject + CSUBJPASS (int): Clausal passive subject + DEP (int): Dependency (unable to determine) + DET (int): Determiner + DISCOURSE (int): Discourse + DOBJ (int): Direct object + EXPL (int): Expletive + GOESWITH (int): Goes with (part of a word in a text not well edited) + IOBJ (int): Indirect object + MARK (int): Marker (word introducing a subordinate clause) + MWE (int): Multi-word expression + MWV (int): Multi-word verbal expression + NEG (int): Negation modifier + NN (int): Noun compound modifier + NPADVMOD (int): Noun phrase used as an adverbial modifier + NSUBJ (int): Nominal subject + NSUBJPASS (int): Passive nominal subject + NUM (int): Numeric modifier of a noun + NUMBER (int): Element of compound number + P (int): Punctuation mark + PARATAXIS (int): Parataxis relation + PARTMOD (int): Participial modifier + PCOMP (int): The complement of a preposition is a clause + POBJ (int): Object of a preposition + POSS (int): Possession modifier + POSTNEG (int): Postverbal negative particle + PRECOMP (int): Predicate complement + PRECONJ (int): Preconjunt + PREDET (int): Predeterminer + PREF (int): Prefix + PREP (int): Prepositional modifier + PRONL (int): The relationship between a verb and verbal morpheme + PRT (int): Particle + PS (int): Associative or possessive marker + QUANTMOD (int): Quantifier phrase modifier + RCMOD (int): Relative clause modifier + RCMODREL (int): Complementizer in relative clause + RDROP (int): Ellipsis without a preceding predicate + REF (int): Referent + REMNANT (int): Remnant + REPARANDUM (int): Reparandum + ROOT (int): Root + SNUM (int): Suffix specifying a unit of number + SUFF (int): Suffix + TMOD (int): Temporal modifier + TOPIC (int): Topic marker + VMOD (int): Clause headed by an infinite form of the verb that modifies a noun + VOCATIVE (int): Vocative + XCOMP (int): Open clausal complement + SUFFIX (int): Name suffix + TITLE (int): Name title + ADVPHMOD (int): Adverbial phrase modifier + AUXCAUS (int): Causative auxiliary + AUXVV (int): Helper auxiliary + DTMOD (int): Rentaishi (Prenominal modifier) + FOREIGN (int): Foreign words + KW (int): Keyword + LIST (int): List for chains of comparable items + NOMC (int): Nominalized clause + NOMCSUBJ (int): Nominalized clausal subject + NOMCSUBJPASS (int): Nominalized clausal passive + NUMC (int): Compound of numeric modifier + COP (int): Copula + DISLOCATED (int): Dislocated relation (for fronted/topicalized elements) + """ + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 
43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + + +class EntityMention(object): + class Type(object): + """ + The supported types of mentions. + + Attributes: + TYPE_UNKNOWN (int): Unknown + PROPER (int): Proper name + COMMON (int): Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 diff --git a/language/google/cloud/gapic/language/v1/language_service_client.py b/language/google/cloud/gapic/language/v1/language_service_client.py new file mode 100644 index 000000000000..fb55b9568b67 --- /dev/null +++ b/language/google/cloud/gapic/language/v1/language_service_client.py @@ -0,0 +1,290 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/language/v1/language_service.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.cloud.language.v1 LanguageService API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.gapic.language.v1 import enums +from google.cloud.proto.language.v1 import language_service_pb2 + + +class LanguageServiceClient(object): + """ + Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + SERVICE_ADDRESS = 'language.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. 
+ credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A LanguageServiceClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-language', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'language_service_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.cloud.language.v1.LanguageService', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, ) + self.language_service_stub = config.create_stub( + language_service_pb2.LanguageServiceStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._analyze_sentiment = api_callable.create_api_call( + self.language_service_stub.AnalyzeSentiment, + settings=defaults['analyze_sentiment']) + self._analyze_entities = api_callable.create_api_call( + self.language_service_stub.AnalyzeEntities, + settings=defaults['analyze_entities']) + self._analyze_syntax = api_callable.create_api_call( + self.language_service_stub.AnalyzeSyntax, + settings=defaults['analyze_syntax']) + self._annotate_text = api_callable.create_api_call( + self.language_service_stub.AnnotateText, + settings=defaults['annotate_text']) + + # Service calls + def analyze_sentiment(self, document, encoding_type=None, options=None): + """ + Analyzes the sentiment of the provided text. 
+ + Example: + >>> from google.cloud.gapic.language.v1 import language_service_client + >>> from google.cloud.proto.language.v1 import language_service_pb2 + >>> client = language_service_client.LanguageServiceClient() + >>> document = language_service_pb2.Document() + >>> response = client.analyze_sentiment(document) + + Args: + document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. + encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeSentimentResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = language_service_pb2.AnalyzeSentimentRequest( + document=document, encoding_type=encoding_type) + return self._analyze_sentiment(request, options) + + def analyze_entities(self, document, encoding_type, options=None): + """ + Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + + Example: + >>> from google.cloud.gapic.language.v1 import language_service_client + >>> from google.cloud.gapic.language.v1 import enums + >>> from google.cloud.proto.language.v1 import language_service_pb2 + >>> client = language_service_client.LanguageServiceClient() + >>> document = language_service_pb2.Document() + >>> encoding_type = enums.EncodingType.NONE + >>> response = client.analyze_entities(document, encoding_type) + + Args: + document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. + encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeEntitiesResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + return self._analyze_entities(request, options) + + def analyze_syntax(self, document, encoding_type, options=None): + """ + Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + + Example: + >>> from google.cloud.gapic.language.v1 import language_service_client + >>> from google.cloud.gapic.language.v1 import enums + >>> from google.cloud.proto.language.v1 import language_service_pb2 + >>> client = language_service_client.LanguageServiceClient() + >>> document = language_service_pb2.Document() + >>> encoding_type = enums.EncodingType.NONE + >>> response = client.analyze_syntax(document, encoding_type) + + Args: + document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. + encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeSyntaxResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + return self._analyze_syntax(request, options) + + def annotate_text(self, document, features, encoding_type, options=None): + """ + A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + + Example: + >>> from google.cloud.gapic.language.v1 import language_service_client + >>> from google.cloud.gapic.language.v1 import enums + >>> from google.cloud.proto.language.v1 import language_service_pb2 + >>> client = language_service_client.LanguageServiceClient() + >>> document = language_service_pb2.Document() + >>> features = language_service_pb2.AnnotateTextRequest.Features() + >>> encoding_type = enums.EncodingType.NONE + >>> response = client.annotate_text(document, features, encoding_type) + + Args: + document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document. + features (:class:`google.cloud.proto.language.v1.language_service_pb2.AnnotateTextRequest.Features`): The enabled features. + encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.language.v1.language_service_pb2.AnnotateTextResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+        request = language_service_pb2.AnnotateTextRequest(
+            document=document, features=features, encoding_type=encoding_type)
+        return self._annotate_text(request, options)
diff --git a/language/google/cloud/gapic/language/v1/language_service_client_config.json b/language/google/cloud/gapic/language/v1/language_service_client_config.json
new file mode 100644
index 000000000000..202d5b0d427b
--- /dev/null
+++ b/language/google/cloud/gapic/language/v1/language_service_client_config.json
@@ -0,0 +1,46 @@
+{
+  "interfaces": {
+    "google.cloud.language.v1.LanguageService": {
+      "retry_codes": {
+        "idempotent": [
+          "DEADLINE_EXCEEDED",
+          "UNAVAILABLE"
+        ],
+        "non_idempotent": []
+      },
+      "retry_params": {
+        "default": {
+          "initial_retry_delay_millis": 100,
+          "retry_delay_multiplier": 1.3,
+          "max_retry_delay_millis": 60000,
+          "initial_rpc_timeout_millis": 60000,
+          "rpc_timeout_multiplier": 1.0,
+          "max_rpc_timeout_millis": 60000,
+          "total_timeout_millis": 600000
+        }
+      },
+      "methods": {
+        "AnalyzeSentiment": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnalyzeEntities": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnalyzeSyntax": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnnotateText": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        }
+      }
+    }
+  }
+}
diff --git a/language/google/cloud/gapic/language/v1beta2/__init__.py b/language/google/cloud/gapic/language/v1beta2/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/language/google/cloud/gapic/language/v1beta2/enums.py b/language/google/cloud/gapic/language/v1beta2/enums.py
new file mode 100644
index 000000000000..2b53e4d913bb
--- /dev/null
+++ b/language/google/cloud/gapic/language/v1beta2/enums.py
@@ -0,0 +1,516 @@
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Wrappers for protocol buffer enum types."""
+
+
+class EncodingType(object):
+    """
+    Represents the text encoding that the caller uses to process the output.
+    Providing an ``EncodingType`` is recommended because the API provides the
+    beginning offsets for various outputs, such as tokens and mentions, and
+    languages that natively use different text encodings may access offsets
+    differently.
+
+    Attributes:
+      NONE (int): If ``EncodingType`` is not specified, encoding-dependent information (such as
+        ``begin_offset``) will be set at ``-1``.
+      UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based
+        on the UTF-8 encoding of the input. C++ and Go are examples of languages
+        that use this encoding natively.
+      UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based
+        on the UTF-16 encoding of the input. Java and JavaScript are examples of
+        languages that use this encoding natively.
+      UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based
+        on the UTF-32 encoding of the input. Python is an example of a language
+        that uses this encoding natively.
+    """
+    NONE = 0
+    UTF8 = 1
+    UTF16 = 2
+    UTF32 = 3
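+
+# Editor's note (illustration only, not generated code): for Python callers
+# the practical difference between encoding types is the unit in which
+# ``begin_offset`` is measured, e.g.:
+#
+#   >>> text = u'caf\xe9 time'
+#   >>> text.find(u'time')                   # code points -> UTF32 offset
+#   5
+#   >>> text.encode('utf-8').find(b'time')   # bytes -> UTF8 offset
+#   6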
+
+
+class Document(object):
+    class Type(object):
+        """
+        The document types enum.
+
+        Attributes:
+          TYPE_UNSPECIFIED (int): The content type is not specified.
+          PLAIN_TEXT (int): Plain text
+          HTML (int): HTML
+        """
+        TYPE_UNSPECIFIED = 0
+        PLAIN_TEXT = 1
+        HTML = 2
+
+
+class Entity(object):
+    class Type(object):
+        """
+        The type of the entity.
+
+        Attributes:
+          UNKNOWN (int): Unknown
+          PERSON (int): Person
+          LOCATION (int): Location
+          ORGANIZATION (int): Organization
+          EVENT (int): Event
+          WORK_OF_ART (int): Work of art
+          CONSUMER_GOOD (int): Consumer goods
+          OTHER (int): Other types
+        """
+        UNKNOWN = 0
+        PERSON = 1
+        LOCATION = 2
+        ORGANIZATION = 3
+        EVENT = 4
+        WORK_OF_ART = 5
+        CONSUMER_GOOD = 6
+        OTHER = 7
+
+
+class PartOfSpeech(object):
+    class Tag(object):
+        """
+        The part of speech tags enum.
+
+        Attributes:
+          UNKNOWN (int): Unknown
+          ADJ (int): Adjective
+          ADP (int): Adposition (preposition and postposition)
+          ADV (int): Adverb
+          CONJ (int): Conjunction
+          DET (int): Determiner
+          NOUN (int): Noun (common and proper)
+          NUM (int): Cardinal number
+          PRON (int): Pronoun
+          PRT (int): Particle or other function word
+          PUNCT (int): Punctuation
+          VERB (int): Verb (all tenses and modes)
+          X (int): Other: foreign words, typos, abbreviations
+          AFFIX (int): Affix
+        """
+        UNKNOWN = 0
+        ADJ = 1
+        ADP = 2
+        ADV = 3
+        CONJ = 4
+        DET = 5
+        NOUN = 6
+        NUM = 7
+        PRON = 8
+        PRT = 9
+        PUNCT = 10
+        VERB = 11
+        X = 12
+        AFFIX = 13
+
+    class Aspect(object):
+        """
+        The characteristic of a verb that expresses time flow during an event.
+
+        Attributes:
+          ASPECT_UNKNOWN (int): Aspect is not applicable in the analyzed language or is not predicted.
+          PERFECTIVE (int): Perfective
+          IMPERFECTIVE (int): Imperfective
+          PROGRESSIVE (int): Progressive
+        """
+        ASPECT_UNKNOWN = 0
+        PERFECTIVE = 1
+        IMPERFECTIVE = 2
+        PROGRESSIVE = 3
+
+    class Case(object):
+        """
+        The grammatical function performed by a noun or pronoun in a phrase,
+        clause, or sentence. In some languages, other parts of speech, such as
+        adjective and determiner, take case inflection in agreement with the noun.
+
+        Attributes:
+          CASE_UNKNOWN (int): Case is not applicable in the analyzed language or is not predicted.
+          ACCUSATIVE (int): Accusative
+          ADVERBIAL (int): Adverbial
+          COMPLEMENTIVE (int): Complementive
+          DATIVE (int): Dative
+          GENITIVE (int): Genitive
+          INSTRUMENTAL (int): Instrumental
+          LOCATIVE (int): Locative
+          NOMINATIVE (int): Nominative
+          OBLIQUE (int): Oblique
+          PARTITIVE (int): Partitive
+          PREPOSITIONAL (int): Prepositional
+          REFLEXIVE_CASE (int): Reflexive
+          RELATIVE_CASE (int): Relative
+          VOCATIVE (int): Vocative
+        """
+        CASE_UNKNOWN = 0
+        ACCUSATIVE = 1
+        ADVERBIAL = 2
+        COMPLEMENTIVE = 3
+        DATIVE = 4
+        GENITIVE = 5
+        INSTRUMENTAL = 6
+        LOCATIVE = 7
+        NOMINATIVE = 8
+        OBLIQUE = 9
+        PARTITIVE = 10
+        PREPOSITIONAL = 11
+        REFLEXIVE_CASE = 12
+        RELATIVE_CASE = 13
+        VOCATIVE = 14
+
+    class Form(object):
+        """
+        Depending on the language, Form may categorize different forms of verbs,
+        adjectives, adverbs, etc. For example, it can categorize inflected
+        endings of verbs and adjectives, or distinguish between short and long
+        forms of adjectives and participles.
+
+        Attributes:
+          FORM_UNKNOWN (int): Form is not applicable in the analyzed language or is not predicted.
+ ADNOMIAL (int): Adnomial + AUXILIARY (int): Auxiliary + COMPLEMENTIZER (int): Complementizer + FINAL_ENDING (int): Final ending + GERUND (int): Gerund + REALIS (int): Realis + IRREALIS (int): Irrealis + SHORT (int): Short form + LONG (int): Long form + ORDER (int): Order form + SPECIFIC (int): Specific form + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(object): + """ + Gender classes of nouns reflected in the behaviour of associated words. + + Attributes: + GENDER_UNKNOWN (int): Gender is not applicable in the analyzed language or is not predicted. + FEMININE (int): Feminine + MASCULINE (int): Masculine + NEUTER (int): Neuter + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(object): + """ + The grammatical feature of verbs, used for showing modality and attitude. + + Attributes: + MOOD_UNKNOWN (int): Mood is not applicable in the analyzed language or is not predicted. + CONDITIONAL_MOOD (int): Conditional + IMPERATIVE (int): Imperative + INDICATIVE (int): Indicative + INTERROGATIVE (int): Interrogative + JUSSIVE (int): Jussive + SUBJUNCTIVE (int): Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(object): + """ + Count distinctions. + + Attributes: + NUMBER_UNKNOWN (int): Number is not applicable in the analyzed language or is not predicted. + SINGULAR (int): Singular + PLURAL (int): Plural + DUAL (int): Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(object): + """ + The distinction between the speaker, second person, third person, etc. + + Attributes: + PERSON_UNKNOWN (int): Person is not applicable in the analyzed language or is not predicted. + FIRST (int): First + SECOND (int): Second + THIRD (int): Third + REFLEXIVE_PERSON (int): Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(object): + """ + This category shows if the token is part of a proper name. + + Attributes: + PROPER_UNKNOWN (int): Proper is not applicable in the analyzed language or is not predicted. + PROPER (int): Proper + NOT_PROPER (int): Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(object): + """ + Reciprocal features of a pronoun. + + Attributes: + RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not + predicted. + RECIPROCAL (int): Reciprocal + NON_RECIPROCAL (int): Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(object): + """ + Time reference. + + Attributes: + TENSE_UNKNOWN (int): Tense is not applicable in the analyzed language or is not predicted. + CONDITIONAL_TENSE (int): Conditional + FUTURE (int): Future + PAST (int): Past + PRESENT (int): Present + IMPERFECT (int): Imperfect + PLUPERFECT (int): Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(object): + """ + The relationship between the action that a verb expresses and the + participants identified by its arguments. + + Attributes: + VOICE_UNKNOWN (int): Voice is not applicable in the analyzed language or is not predicted. 
+          ACTIVE (int): Active
+          CAUSATIVE (int): Causative
+          PASSIVE (int): Passive
+        """
+        VOICE_UNKNOWN = 0
+        ACTIVE = 1
+        CAUSATIVE = 2
+        PASSIVE = 3
+
+
+class DependencyEdge(object):
+    class Label(object):
+        """
+        The parse label enum for the token.
+
+        Attributes:
+          UNKNOWN (int): Unknown
+          ABBREV (int): Abbreviation modifier
+          ACOMP (int): Adjectival complement
+          ADVCL (int): Adverbial clause modifier
+          ADVMOD (int): Adverbial modifier
+          AMOD (int): Adjectival modifier of an NP
+          APPOS (int): Appositional modifier of an NP
+          ATTR (int): Attribute dependent of a copular verb
+          AUX (int): Auxiliary (non-main) verb
+          AUXPASS (int): Passive auxiliary
+          CC (int): Coordinating conjunction
+          CCOMP (int): Clausal complement of a verb or adjective
+          CONJ (int): Conjunct
+          CSUBJ (int): Clausal subject
+          CSUBJPASS (int): Clausal passive subject
+          DEP (int): Dependency (unable to determine)
+          DET (int): Determiner
+          DISCOURSE (int): Discourse
+          DOBJ (int): Direct object
+          EXPL (int): Expletive
+          GOESWITH (int): Goes with (part of a word in a text not well edited)
+          IOBJ (int): Indirect object
+          MARK (int): Marker (word introducing a subordinate clause)
+          MWE (int): Multi-word expression
+          MWV (int): Multi-word verbal expression
+          NEG (int): Negation modifier
+          NN (int): Noun compound modifier
+          NPADVMOD (int): Noun phrase used as an adverbial modifier
+          NSUBJ (int): Nominal subject
+          NSUBJPASS (int): Passive nominal subject
+          NUM (int): Numeric modifier of a noun
+          NUMBER (int): Element of compound number
+          P (int): Punctuation mark
+          PARATAXIS (int): Parataxis relation
+          PARTMOD (int): Participial modifier
+          PCOMP (int): The complement of a preposition is a clause
+          POBJ (int): Object of a preposition
+          POSS (int): Possession modifier
+          POSTNEG (int): Postverbal negative particle
+          PRECOMP (int): Predicate complement
+          PRECONJ (int): Preconjunct
+          PREDET (int): Predeterminer
+          PREF (int): Prefix
+          PREP (int): Prepositional modifier
+          PRONL (int): The relationship between a verb and verbal morpheme
+          PRT (int): Particle
+          PS (int): Associative or possessive marker
+          QUANTMOD (int): Quantifier phrase modifier
+          RCMOD (int): Relative clause modifier
+          RCMODREL (int): Complementizer in relative clause
+          RDROP (int): Ellipsis without a preceding predicate
+          REF (int): Referent
+          REMNANT (int): Remnant
+          REPARANDUM (int): Reparandum
+          ROOT (int): Root
+          SNUM (int): Suffix specifying a unit of number
+          SUFF (int): Suffix
+          TMOD (int): Temporal modifier
+          TOPIC (int): Topic marker
+          VMOD (int): Clause headed by a non-finite form of the verb that modifies a noun
+          VOCATIVE (int): Vocative
+          XCOMP (int): Open clausal complement
+          SUFFIX (int): Name suffix
+          TITLE (int): Name title
+          ADVPHMOD (int): Adverbial phrase modifier
+          AUXCAUS (int): Causative auxiliary
+          AUXVV (int): Helper auxiliary
+          DTMOD (int): Rentaishi (Prenominal modifier)
+          FOREIGN (int): Foreign words
+          KW (int): Keyword
+          LIST (int): List for chains of comparable items
+          NOMC (int): Nominalized clause
+          NOMCSUBJ (int): Nominalized clausal subject
+          NOMCSUBJPASS (int): Nominalized clausal passive
+          NUMC (int): Compound of numeric modifier
+          COP (int): Copula
+          DISLOCATED (int): Dislocated relation (for fronted/topicalized elements)
+        """
+        UNKNOWN = 0
+        ABBREV = 1
+        ACOMP = 2
+        ADVCL = 3
+        ADVMOD = 4
+        AMOD = 5
+        APPOS = 6
+        ATTR = 7
+        AUX = 8
+        AUXPASS = 9
+        CC = 10
+        CCOMP = 11
+        CONJ = 12
+        CSUBJ = 13
+        CSUBJPASS = 14
+        DEP = 15
+        DET = 16
+        DISCOURSE = 17
+        DOBJ = 18
+        EXPL = 19
+        GOESWITH = 20
+        IOBJ = 21
+        MARK = 22
+        MWE = 23
+        MWV = 24
+
NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + + +class EntityMention(object): + class Type(object): + """ + The supported types of mentions. + + Attributes: + TYPE_UNKNOWN (int): Unknown + PROPER (int): Proper name + COMMON (int): Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 diff --git a/language/google/cloud/gapic/language/v1beta2/language_service_client.py b/language/google/cloud/gapic/language/v1beta2/language_service_client.py new file mode 100644 index 000000000000..a990d2a9758a --- /dev/null +++ b/language/google/cloud/gapic/language/v1beta2/language_service_client.py @@ -0,0 +1,326 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/language/v1beta2/language_service.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.cloud.language.v1beta2 LanguageService API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.gapic.language.v1beta2 import enums +from google.cloud.proto.language.v1beta2 import language_service_pb2 + + +class LanguageServiceClient(object): + """ + Provides text analysis operations such as sentiment analysis and entity + recognition. 
+ """ + + SERVICE_ADDRESS = 'language.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A LanguageServiceClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-language', ).version + + # Load the configuration defaults. 
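+        # Editor's note (illustration only, not generated code): the packaged
+        # JSON defaults give every method a 30 s timeout and exponential retry
+        # backoff (100 ms initial delay, 1.3 multiplier, 60 s cap, 600 s total
+        # deadline). A ``client_config`` dict mirroring that JSON structure is
+        # merged over these defaults per method; hypothetically, e.g.:
+        #
+        #     client_config = {'interfaces': {
+        #         'google.cloud.language.v1beta2.LanguageService': {
+        #             'methods': {'AnnotateText': {'timeout_millis': 60000}}}}}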
+        default_client_config = json.loads(
+            pkg_resources.resource_string(
+                __name__, 'language_service_client_config.json').decode())
+        defaults = api_callable.construct_settings(
+            'google.cloud.language.v1beta2.LanguageService',
+            default_client_config,
+            client_config,
+            config.STATUS_CODE_NAMES,
+            metrics_headers=metrics_headers, )
+        self.language_service_stub = config.create_stub(
+            language_service_pb2.LanguageServiceStub,
+            channel=channel,
+            service_path=service_path,
+            service_port=port,
+            credentials=credentials,
+            scopes=scopes,
+            ssl_credentials=ssl_credentials)
+
+        self._analyze_sentiment = api_callable.create_api_call(
+            self.language_service_stub.AnalyzeSentiment,
+            settings=defaults['analyze_sentiment'])
+        self._analyze_entities = api_callable.create_api_call(
+            self.language_service_stub.AnalyzeEntities,
+            settings=defaults['analyze_entities'])
+        self._analyze_entity_sentiment = api_callable.create_api_call(
+            self.language_service_stub.AnalyzeEntitySentiment,
+            settings=defaults['analyze_entity_sentiment'])
+        self._analyze_syntax = api_callable.create_api_call(
+            self.language_service_stub.AnalyzeSyntax,
+            settings=defaults['analyze_syntax'])
+        self._annotate_text = api_callable.create_api_call(
+            self.language_service_stub.AnnotateText,
+            settings=defaults['annotate_text'])
+
+    # Service calls
+    def analyze_sentiment(self, document, encoding_type=None, options=None):
+        """
+        Analyzes the sentiment of the provided text.
+
+        Example:
+          >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+          >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+          >>> client = language_service_client.LanguageServiceClient()
+          >>> document = language_service_pb2.Document()
+          >>> response = client.analyze_sentiment(document)
+
+        Args:
+          document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+          encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets for the
+            sentence sentiment.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSentimentResponse` instance.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = language_service_pb2.AnalyzeSentimentRequest(
+            document=document, encoding_type=encoding_type)
+        return self._analyze_sentiment(request, options)
+
+    def analyze_entities(self, document, encoding_type, options=None):
+        """
+        Finds named entities (currently proper names and common nouns) in the text
+        along with entity types, salience, mentions for each entity, and
+        other properties.
+
+        Example:
+          >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+          >>> from google.cloud.gapic.language.v1beta2 import enums
+          >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+          >>> client = language_service_client.LanguageServiceClient()
+          >>> document = language_service_pb2.Document()
+          >>> encoding_type = enums.EncodingType.NONE
+          >>> response = client.analyze_entities(document, encoding_type)
+
+        Args:
+          document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+          encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitiesResponse` instance.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = language_service_pb2.AnalyzeEntitiesRequest(
+            document=document, encoding_type=encoding_type)
+        return self._analyze_entities(request, options)
+
+    def analyze_entity_sentiment(self, document, encoding_type, options=None):
+        """
+        Finds entities, similar to ``AnalyzeEntities``, in the text and analyzes
+        sentiment associated with each entity and its mentions.
+
+        Example:
+          >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+          >>> from google.cloud.gapic.language.v1beta2 import enums
+          >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+          >>> client = language_service_client.LanguageServiceClient()
+          >>> document = language_service_pb2.Document()
+          >>> encoding_type = enums.EncodingType.NONE
+          >>> response = client.analyze_entity_sentiment(document, encoding_type)
+
+        Args:
+          document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+          encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitySentimentResponse` instance.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = language_service_pb2.AnalyzeEntitySentimentRequest(
+            document=document, encoding_type=encoding_type)
+        return self._analyze_entity_sentiment(request, options)
+
+    def analyze_syntax(self, document, encoding_type, options=None):
+        """
+        Analyzes the syntax of the text and provides sentence boundaries and
+        tokenization along with part of speech tags, dependency trees, and other
+        properties.
+
+        Example:
+          >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+          >>> from google.cloud.gapic.language.v1beta2 import enums
+          >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+          >>> client = language_service_client.LanguageServiceClient()
+          >>> document = language_service_pb2.Document()
+          >>> encoding_type = enums.EncodingType.NONE
+          >>> response = client.analyze_syntax(document, encoding_type)
+
+        Args:
+          document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+          encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSyntaxResponse` instance.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
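+        # Editor's note (illustration only, not generated code): Python
+        # callers typically pass ``enums.EncodingType.UTF32`` so that each
+        # returned token's ``text.begin_offset`` indexes directly into the
+        # original unicode string, e.g.:
+        #
+        #     response = client.analyze_syntax(document, enums.EncodingType.UTF32)
+        #     offsets = [token.text.begin_offset for token in response.tokens]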
+        request = language_service_pb2.AnalyzeSyntaxRequest(
+            document=document, encoding_type=encoding_type)
+        return self._analyze_syntax(request, options)
+
+    def annotate_text(self, document, features, encoding_type, options=None):
+        """
+        A convenience method that provides all syntax, sentiment, and entity
+        features in one call.
+
+        Example:
+          >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+          >>> from google.cloud.gapic.language.v1beta2 import enums
+          >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+          >>> client = language_service_client.LanguageServiceClient()
+          >>> document = language_service_pb2.Document()
+          >>> features = language_service_pb2.AnnotateTextRequest.Features()
+          >>> encoding_type = enums.EncodingType.NONE
+          >>> response = client.annotate_text(document, features, encoding_type)
+
+        Args:
+          document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+          features (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextRequest.Features`): The enabled features.
+          encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextResponse` instance.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = language_service_pb2.AnnotateTextRequest(
+            document=document, features=features, encoding_type=encoding_type)
+        return self._annotate_text(request, options)
diff --git a/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json b/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json
new file mode 100644
index 000000000000..8018f8a7bbf5
--- /dev/null
+++ b/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json
@@ -0,0 +1,51 @@
+{
+  "interfaces": {
+    "google.cloud.language.v1beta2.LanguageService": {
+      "retry_codes": {
+        "idempotent": [
+          "DEADLINE_EXCEEDED",
+          "UNAVAILABLE"
+        ],
+        "non_idempotent": []
+      },
+      "retry_params": {
+        "default": {
+          "initial_retry_delay_millis": 100,
+          "retry_delay_multiplier": 1.3,
+          "max_retry_delay_millis": 60000,
+          "initial_rpc_timeout_millis": 60000,
+          "rpc_timeout_multiplier": 1.0,
+          "max_rpc_timeout_millis": 60000,
+          "total_timeout_millis": 600000
+        }
+      },
+      "methods": {
+        "AnalyzeSentiment": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnalyzeEntities": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnalyzeEntitySentiment": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnalyzeSyntax": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "AnnotateText": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        }
+      }
+    }
+  }
+}
diff --git a/language/google/cloud/language/__init__.py b/language/google/cloud/language/__init__.py
index 8cc584b17cb8..8bba28ead739
--- a/language/google/cloud/language/__init__.py
+++ b/language/google/cloud/language/__init__.py
@@ -12,14 +12,49 @@ # See the
License for the specific language governing permissions and # limitations under the License. +# ----------------------------------------------------------------------------- +# TRANSITION CODE +# ----------------------------------------------------------------------------- +# The old Language manual layer is now deprecated, but to allow +# users the time to move from the manual layer to the mostly auto-generated +# layer, they are both living side by side for a few months. +# +# Instantiating the old manual layer (`google.cloud.language.Client`) will +# issue a DeprecationWarning. +# +# When it comes time to remove the old layer, everything in this directory +# should go away EXCEPT __init__.py (which can be renamed to language.py and +# put one directory above). +# +# Additionally, the import and export of `Client`, `Document`, and `Encoding` +# should be removed from this file (along with this note), and the rest should +# be left intact. +# ----------------------------------------------------------------------------- + """Client library for Google Cloud Natural Language API.""" +from __future__ import absolute_import from pkg_resources import get_distribution __version__ = get_distribution('google-cloud-language').version +from google.cloud.language_v1 import * # noqa + from google.cloud.language.client import Client from google.cloud.language.document import Document from google.cloud.language.document import Encoding -__all__ = ['Client', 'Document', 'Encoding', '__version__'] +__all__ = ( + # Common + '__version__', + + # Manual Layer + 'Client', + 'Document', + 'Encoding', + + # Auto-gen + 'enums', + 'LanguageServiceClient', + 'types', +) diff --git a/language/google/cloud/language/client.py b/language/google/cloud/language/client.py index da6ea90c156b..58066443c844 100644 --- a/language/google/cloud/language/client.py +++ b/language/google/cloud/language/client.py @@ -52,6 +52,16 @@ class Client(client_module.Client): } def __init__(self, credentials=None, api_version='v1', _http=None): + + # Add a deprecation warning for this class. + warnings.warn( + 'This client class and objects that derive from it have been ' + 'deprecated. Use `google.cloud.language.LanguageServiceClient` ' + '(provided by this package) instead. This client will be removed ' + 'in a future release.', + DeprecationWarning, + ) + super(Client, self).__init__( credentials=credentials, _http=_http) ConnectionClass = self._CONNECTION_CLASSES[api_version] diff --git a/language/google/cloud/language_v1/__init__.py b/language/google/cloud/language_v1/__init__.py new file mode 100644 index 000000000000..a5666eadb5c7 --- /dev/null +++ b/language/google/cloud/language_v1/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
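+
+# Editor's note (illustration only, not part of the shipped file): with this
+# layout, the recommended import surface is the versioned package, e.g.:
+#
+#   >>> from google.cloud import language_v1
+#   >>> client = language_v1.LanguageServiceClient()
+#   >>> document = language_v1.types.Document(
+#   ...     content='Hello, world!',
+#   ...     type=language_v1.enums.Document.Type.PLAIN_TEXT)
+#   >>> sentiment = client.analyze_sentiment(document).document_sentiment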
+ +from __future__ import absolute_import + +from google.cloud.gapic.language.v1 import language_service_client as lsc +from google.cloud.gapic.language.v1 import enums + +from google.cloud.language_v1 import types + + +LanguageServiceClient = lsc.LanguageServiceClient + + +__all__ = ( + 'enums', + 'LanguageServiceClient', + 'types', +) diff --git a/language/google/cloud/language_v1/types.py b/language/google/cloud/language_v1/types.py new file mode 100644 index 000000000000..6223f6846e09 --- /dev/null +++ b/language/google/cloud/language_v1/types.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.cloud.proto.language.v1 import language_service_pb2 + +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(language_service_pb2).items(): + message.__module__ = 'google.cloud.language_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/language/google/cloud/language_v1beta2/__init__.py b/language/google/cloud/language_v1beta2/__init__.py new file mode 100644 index 000000000000..e0a3e4cc287a --- /dev/null +++ b/language/google/cloud/language_v1beta2/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.gapic.language.v1beta2 import language_service_client as lsc +from google.cloud.gapic.language.v1beta2 import enums + +from google.cloud.language_v1beta2 import types + + +LanguageServiceClient = lsc.LanguageServiceClient + + +__all__ = ( + 'enums', + 'LanguageServiceClient', + 'types', +) diff --git a/language/google/cloud/language_v1beta2/types.py b/language/google/cloud/language_v1beta2/types.py new file mode 100644 index 000000000000..557d05aeb001 --- /dev/null +++ b/language/google/cloud/language_v1beta2/types.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.cloud.proto.language.v1beta2 import language_service_pb2 + +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(language_service_pb2).items(): + message.__module__ = 'google.cloud.language_v1beta2.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/language/google/cloud/proto/__init__.py b/language/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/proto/language/__init__.py b/language/google/cloud/proto/language/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/proto/language/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/proto/language/v1/__init__.py b/language/google/cloud/proto/language/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/language/google/cloud/proto/language/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/language/google/cloud/proto/language/v1/language_service_pb2.py b/language/google/cloud/proto/language/v1/language_service_pb2.py new file mode 100644 index 000000000000..98d59f56272c --- /dev/null +++ b/language/google/cloud/proto/language/v1/language_service_pb2.py @@ -0,0 +1,2647 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/language/v1/language_service.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/language/v1/language_service.proto', + package='google.cloud.language.v1', + syntax='proto3', + serialized_pb=_b('\n5google/cloud/proto/language/v1/language_service.proto\x12\x18google.cloud.language.v1\x1a\x1cgoogle/api/annotations.proto\"\xc3\x01\n\x08\x44ocument\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32\'.google.cloud.language.v1.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"t\n\x08Sentence\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12\x36\n\tsentiment\x18\x02 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\"\x86\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x04type\x18\x02 \x01(\x0e\x32%.google.cloud.language.v1.Entity.Type\x12@\n\x08metadata\x18\x03 \x03(\x0b\x32..google.cloud.language.v1.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12\x39\n\x08mentions\x18\x05 \x03(\x0b\x32\'.google.cloud.language.v1.EntityMention\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xcb\x01\n\x05Token\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12>\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32&.google.cloud.language.v1.PartOfSpeech\x12\x41\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xa3\x10\n\x0cPartOfSpeech\x12\x37\n\x03tag\x18\x01 \x01(\x0e\x32*.google.cloud.language.v1.PartOfSpeech.Tag\x12=\n\x06\x61spect\x18\x02 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Aspect\x12\x39\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Case\x12\x39\n\x04\x66orm\x18\x04 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Form\x12=\n\x06gender\x18\x05 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Gender\x12\x39\n\x04mood\x18\x06 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Mood\x12=\n\x06number\x18\x07 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Number\x12=\n\x06person\x18\x08 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Person\x12=\n\x06proper\x18\t \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Proper\x12G\n\x0breciprocity\x18\n \x01(\x0e\x32\x32.google.cloud.language.v1.PartOfSpeech.Reciprocity\x12;\n\x05tense\x18\x0b 
\x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Tense\x12;\n\x05voice\x18\x0c \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\xd8\x07\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12=\n\x05label\x18\x02 
\x01(\x0e\x32..google.cloud.language.v1.DependencyEdge.Label\"\xec\x06\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\"\xaf\x01\n\rEntityMention\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.language.v1.EntityMention.Type\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x8e\x01\n\x17\x41nalyzeSentimentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\xa4\x01\n\x18\x41nalyzeSentimentResponse\x12?\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12\x35\n\tsentences\x18\x03 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\"\x8d\x01\n\x16\x41nalyzeEntitiesRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"_\n\x17\x41nalyzeEntitiesResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x8b\x01\n\x14\x41nalyzeSyntaxRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 
\x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\x91\x01\n\x15\x41nalyzeSyntaxResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xb6\x02\n\x13\x41nnotateTextRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12H\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x36.google.cloud.language.v1.AnnotateTextRequest.Features\x12=\n\rencoding_type\x18\x03 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\x1a`\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\"\x85\x02\n\x14\x41nnotateTextResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x32\n\x08\x65ntities\x18\x03 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12?\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x8d\x05\n\x0fLanguageService\x12\xa4\x01\n\x10\x41nalyzeSentiment\x12\x31.google.cloud.language.v1.AnalyzeSentimentRequest\x1a\x32.google.cloud.language.v1.AnalyzeSentimentResponse\")\x82\xd3\xe4\x93\x02#\"\x1e/v1/documents:analyzeSentiment:\x01*\x12\xa0\x01\n\x0f\x41nalyzeEntities\x12\x30.google.cloud.language.v1.AnalyzeEntitiesRequest\x1a\x31.google.cloud.language.v1.AnalyzeEntitiesResponse\"(\x82\xd3\xe4\x93\x02\"\"\x1d/v1/documents:analyzeEntities:\x01*\x12\x98\x01\n\rAnalyzeSyntax\x12..google.cloud.language.v1.AnalyzeSyntaxRequest\x1a/.google.cloud.language.v1.AnalyzeSyntaxResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/documents:analyzeSyntax:\x01*\x12\x94\x01\n\x0c\x41nnotateText\x12-.google.cloud.language.v1.AnnotateTextRequest\x1a..google.cloud.language.v1.AnnotateTextResponse\"%\x82\xd3\xe4\x93\x02\x1f\"\x1a/v1/documents:annotateText:\x01*Bx\n\x1c\x63om.google.cloud.language.v1B\x14LanguageServiceProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/language/v1;languageb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_ENCODINGTYPE = _descriptor.EnumDescriptor( + name='EncodingType', + full_name='google.cloud.language.v1.EncodingType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF8', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF16', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF32', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=5797, + serialized_end=5853, +) +_sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) + +EncodingType = enum_type_wrapper.EnumTypeWrapper(_ENCODINGTYPE) +NONE = 0 +UTF8 = 1 +UTF16 = 2 +UTF32 = 3 + + +_DOCUMENT_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1.Document.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='PLAIN_TEXT', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HTML', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=245, + serialized_end=299, +) +_sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) + +_ENTITY_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1.Entity.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERSON', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATION', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORGANIZATION', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EVENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WORK_OF_ART', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONSUMER_GOOD', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OTHER', index=7, number=7, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=699, + serialized_end=820, +) +_sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) + +_PARTOFSPEECH_TAG = _descriptor.EnumDescriptor( + name='Tag', + full_name='google.cloud.language.v1.PartOfSpeech.Tag', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADJ', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADV', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOUN', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRON', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUNCT', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VERB', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='X', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AFFIX', index=13, number=13, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1837, + serialized_end=1978, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) + +_PARTOFSPEECH_ASPECT = _descriptor.EnumDescriptor( + name='Aspect', + full_name='google.cloud.language.v1.PartOfSpeech.Aspect', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ASPECT_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERFECTIVE', index=1, number=1, + options=None, 
+ type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECTIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROGRESSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1980, + serialized_end=2059, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) + +_PARTOFSPEECH_CASE = _descriptor.EnumDescriptor( + name='Case', + full_name='google.cloud.language.v1.PartOfSpeech.Case', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CASE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACCUSATIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVERBIAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GENITIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INSTRUMENTAL', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATIVE', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMINATIVE', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OBLIQUE', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTITIVE', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREPOSITIONAL', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_CASE', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RELATIVE_CASE', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=14, number=14, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2062, + serialized_end=2310, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) + +_PARTOFSPEECH_FORM = _descriptor.EnumDescriptor( + name='Form', + full_name='google.cloud.language.v1.PartOfSpeech.Form', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FORM_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADNOMIAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXILIARY', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIZER', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FINAL_ENDING', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GERUND', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REALIS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IRREALIS', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SHORT', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LONG', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORDER', index=10, 
number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPECIFIC', index=11, number=11, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2313, + serialized_end=2488, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) + +_PARTOFSPEECH_GENDER = _descriptor.EnumDescriptor( + name='Gender', + full_name='google.cloud.language.v1.PartOfSpeech.Gender', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='GENDER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEMININE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MASCULINE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEUTER', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2490, + serialized_end=2559, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) + +_PARTOFSPEECH_MOOD = _descriptor.EnumDescriptor( + name='Mood', + full_name='google.cloud.language.v1.PartOfSpeech.Mood', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MOOD_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_MOOD', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INDICATIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INTERROGATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JUSSIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUBJUNCTIVE', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2561, + serialized_end=2688, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) + +_PARTOFSPEECH_NUMBER = _descriptor.EnumDescriptor( + name='Number', + full_name='google.cloud.language.v1.PartOfSpeech.Number', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NUMBER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SINGULAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLURAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DUAL', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2690, + serialized_end=2754, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) + +_PARTOFSPEECH_PERSON = _descriptor.EnumDescriptor( + name='Person', + full_name='google.cloud.language.v1.PartOfSpeech.Person', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PERSON_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FIRST', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SECOND', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='THIRD', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_PERSON', index=4, number=4, + options=None, + type=None), + ], + 
containing_type=None, + options=None, + serialized_start=2756, + serialized_end=2840, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) + +_PARTOFSPEECH_PROPER = _descriptor.EnumDescriptor( + name='Proper', + full_name='google.cloud.language.v1.PartOfSpeech.Proper', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PROPER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOT_PROPER', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2842, + serialized_end=2898, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) + +_PARTOFSPEECH_RECIPROCITY = _descriptor.EnumDescriptor( + name='Reciprocity', + full_name='google.cloud.language.v1.PartOfSpeech.Reciprocity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RECIPROCITY_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RECIPROCAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NON_RECIPROCAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2900, + serialized_end=2974, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) + +_PARTOFSPEECH_TENSE = _descriptor.EnumDescriptor( + name='Tense', + full_name='google.cloud.language.v1.PartOfSpeech.Tense', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TENSE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_TENSE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FUTURE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PAST', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRESENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECT', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLUPERFECT', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2976, + serialized_end=3091, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) + +_PARTOFSPEECH_VOICE = _descriptor.EnumDescriptor( + name='Voice', + full_name='google.cloud.language.v1.PartOfSpeech.Voice', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VOICE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACTIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAUSATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PASSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3093, + serialized_end=3159, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) + +_DEPENDENCYEDGE_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.cloud.language.v1.DependencyEdge.Label', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='ABBREV', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACOMP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVCL', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVMOD', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AMOD', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='APPOS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ATTR', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUX', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXPASS', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CC', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CCOMP', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJ', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJPASS', index=14, number=14, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DEP', index=15, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=16, number=16, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISCOURSE', index=17, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOBJ', index=18, number=18, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EXPL', index=19, number=19, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GOESWITH', index=20, number=20, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IOBJ', index=21, number=21, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MARK', index=22, number=22, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWE', index=23, number=23, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWV', index=24, number=24, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=25, number=25, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NN', index=26, number=26, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NPADVMOD', index=27, number=27, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJ', index=28, number=28, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJPASS', index=29, number=29, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=30, number=30, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMBER', index=31, number=31, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='P', index=32, number=32, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARATAXIS', index=33, number=33, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTMOD', index=34, number=34, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PCOMP', index=35, number=35, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='POBJ', index=36, number=36, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSS', index=37, number=37, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSTNEG', index=38, number=38, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECOMP', index=39, number=39, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECONJ', index=40, number=40, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREDET', index=41, number=41, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREF', index=42, number=42, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREP', index=43, number=43, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRONL', index=44, number=44, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=45, number=45, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PS', index=46, number=46, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUANTMOD', index=47, number=47, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMOD', index=48, number=48, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMODREL', index=49, number=49, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RDROP', index=50, number=50, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REF', index=51, number=51, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REMNANT', index=52, number=52, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPARANDUM', index=53, number=53, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ROOT', index=54, number=54, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SNUM', index=55, number=55, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFF', index=56, number=56, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TMOD', index=57, number=57, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOPIC', index=58, number=58, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VMOD', index=59, number=59, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=60, number=60, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='XCOMP', index=61, number=61, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFFIX', index=62, number=62, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TITLE', index=63, number=63, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVPHMOD', index=64, number=64, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXCAUS', index=65, number=65, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXVV', index=66, number=66, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DTMOD', index=67, number=67, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN', index=68, number=68, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KW', index=69, number=69, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LIST', index=70, number=70, + 
options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMC', index=71, number=71, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMCSUBJ', index=72, number=72, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMCSUBJPASS', index=73, number=73, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMC', index=74, number=74, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COP', index=75, number=75, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISLOCATED', index=76, number=76, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3270, + serialized_end=4146, +) +_sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) + +_ENTITYMENTION_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1.EntityMention.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMMON', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=4276, + serialized_end=4324, +) +_sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) + + +_DOCUMENT = _descriptor.Descriptor( + name='Document', + full_name='google.cloud.language.v1.Document', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1.Document.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.language.v1.Document.content', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gcs_content_uri', full_name='google.cloud.language.v1.Document.gcs_content_uri', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.Document.language', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DOCUMENT_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='source', full_name='google.cloud.language.v1.Document.source', + index=0, containing_type=None, fields=[]), + ], + serialized_start=114, + serialized_end=309, +) + + +_SENTENCE = _descriptor.Descriptor( + name='Sentence', + full_name='google.cloud.language.v1.Sentence', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name='text', full_name='google.cloud.language.v1.Sentence.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentiment', full_name='google.cloud.language.v1.Sentence.sentiment', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=311, + serialized_end=427, +) + + +_ENTITY_METADATAENTRY = _descriptor.Descriptor( + name='MetadataEntry', + full_name='google.cloud.language.v1.Entity.MetadataEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.language.v1.Entity.MetadataEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.language.v1.Entity.MetadataEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=650, + serialized_end=697, +) + +_ENTITY = _descriptor.Descriptor( + name='Entity', + full_name='google.cloud.language.v1.Entity', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.language.v1.Entity.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1.Entity.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metadata', full_name='google.cloud.language.v1.Entity.metadata', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='salience', full_name='google.cloud.language.v1.Entity.salience', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mentions', 
full_name='google.cloud.language.v1.Entity.mentions', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ENTITY_METADATAENTRY, ], + enum_types=[ + _ENTITY_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=430, + serialized_end=820, +) + + +_TOKEN = _descriptor.Descriptor( + name='Token', + full_name='google.cloud.language.v1.Token', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1.Token.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='part_of_speech', full_name='google.cloud.language.v1.Token.part_of_speech', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dependency_edge', full_name='google.cloud.language.v1.Token.dependency_edge', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='lemma', full_name='google.cloud.language.v1.Token.lemma', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=823, + serialized_end=1026, +) + + +_SENTIMENT = _descriptor.Descriptor( + name='Sentiment', + full_name='google.cloud.language.v1.Sentiment', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='magnitude', full_name='google.cloud.language.v1.Sentiment.magnitude', index=0, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='score', full_name='google.cloud.language.v1.Sentiment.score', index=1, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1028, + serialized_end=1073, +) + + +_PARTOFSPEECH = _descriptor.Descriptor( + name='PartOfSpeech', + full_name='google.cloud.language.v1.PartOfSpeech', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tag', 
full_name='google.cloud.language.v1.PartOfSpeech.tag', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='aspect', full_name='google.cloud.language.v1.PartOfSpeech.aspect', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='case', full_name='google.cloud.language.v1.PartOfSpeech.case', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='form', full_name='google.cloud.language.v1.PartOfSpeech.form', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gender', full_name='google.cloud.language.v1.PartOfSpeech.gender', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mood', full_name='google.cloud.language.v1.PartOfSpeech.mood', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', full_name='google.cloud.language.v1.PartOfSpeech.number', index=6, + number=7, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='person', full_name='google.cloud.language.v1.PartOfSpeech.person', index=7, + number=8, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='proper', full_name='google.cloud.language.v1.PartOfSpeech.proper', index=8, + number=9, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reciprocity', full_name='google.cloud.language.v1.PartOfSpeech.reciprocity', index=9, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tense', full_name='google.cloud.language.v1.PartOfSpeech.tense', index=10, + number=11, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='voice', 
full_name='google.cloud.language.v1.PartOfSpeech.voice', index=11, + number=12, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PARTOFSPEECH_TAG, + _PARTOFSPEECH_ASPECT, + _PARTOFSPEECH_CASE, + _PARTOFSPEECH_FORM, + _PARTOFSPEECH_GENDER, + _PARTOFSPEECH_MOOD, + _PARTOFSPEECH_NUMBER, + _PARTOFSPEECH_PERSON, + _PARTOFSPEECH_PROPER, + _PARTOFSPEECH_RECIPROCITY, + _PARTOFSPEECH_TENSE, + _PARTOFSPEECH_VOICE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1076, + serialized_end=3159, +) + + +_DEPENDENCYEDGE = _descriptor.Descriptor( + name='DependencyEdge', + full_name='google.cloud.language.v1.DependencyEdge', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='head_token_index', full_name='google.cloud.language.v1.DependencyEdge.head_token_index', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label', full_name='google.cloud.language.v1.DependencyEdge.label', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DEPENDENCYEDGE_LABEL, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3162, + serialized_end=4146, +) + + +_ENTITYMENTION = _descriptor.Descriptor( + name='EntityMention', + full_name='google.cloud.language.v1.EntityMention', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1.EntityMention.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1.EntityMention.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ENTITYMENTION_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4149, + serialized_end=4324, +) + + +_TEXTSPAN = _descriptor.Descriptor( + name='TextSpan', + full_name='google.cloud.language.v1.TextSpan', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.language.v1.TextSpan.content', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='begin_offset', 
full_name='google.cloud.language.v1.TextSpan.begin_offset', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4326, + serialized_end=4375, +) + + +_ANALYZESENTIMENTREQUEST = _descriptor.Descriptor( + name='AnalyzeSentimentRequest', + full_name='google.cloud.language.v1.AnalyzeSentimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeSentimentRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeSentimentRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4378, + serialized_end=4520, +) + + +_ANALYZESENTIMENTRESPONSE = _descriptor.Descriptor( + name='AnalyzeSentimentResponse', + full_name='google.cloud.language.v1.AnalyzeSentimentResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document_sentiment', full_name='google.cloud.language.v1.AnalyzeSentimentResponse.document_sentiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeSentimentResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1.AnalyzeSentimentResponse.sentences', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4523, + serialized_end=4687, +) + + +_ANALYZEENTITIESREQUEST = _descriptor.Descriptor( + name='AnalyzeEntitiesRequest', + full_name='google.cloud.language.v1.AnalyzeEntitiesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeEntitiesRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeEntitiesRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4690, + serialized_end=4831, +) + + +_ANALYZEENTITIESRESPONSE = _descriptor.Descriptor( + name='AnalyzeEntitiesResponse', + full_name='google.cloud.language.v1.AnalyzeEntitiesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1.AnalyzeEntitiesResponse.entities', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeEntitiesResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4833, + serialized_end=4928, +) + + +_ANALYZESYNTAXREQUEST = _descriptor.Descriptor( + name='AnalyzeSyntaxRequest', + full_name='google.cloud.language.v1.AnalyzeSyntaxRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeSyntaxRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeSyntaxRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4931, + serialized_end=5070, +) + + +_ANALYZESYNTAXRESPONSE = _descriptor.Descriptor( + name='AnalyzeSyntaxResponse', + full_name='google.cloud.language.v1.AnalyzeSyntaxResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1.AnalyzeSyntaxResponse.sentences', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tokens', 
full_name='google.cloud.language.v1.AnalyzeSyntaxResponse.tokens', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeSyntaxResponse.language', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5073, + serialized_end=5218, +) + + +_ANNOTATETEXTREQUEST_FEATURES = _descriptor.Descriptor( + name='Features', + full_name='google.cloud.language.v1.AnnotateTextRequest.Features', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='extract_syntax', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_entities', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_document_sentiment', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5435, + serialized_end=5531, +) + +_ANNOTATETEXTREQUEST = _descriptor.Descriptor( + name='AnnotateTextRequest', + full_name='google.cloud.language.v1.AnnotateTextRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnnotateTextRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='features', full_name='google.cloud.language.v1.AnnotateTextRequest.features', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnnotateTextRequest.encoding_type', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ANNOTATETEXTREQUEST_FEATURES, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5221,
+  serialized_end=5531,
+)
+
+
+_ANNOTATETEXTRESPONSE = _descriptor.Descriptor(
+  name='AnnotateTextResponse',
+  full_name='google.cloud.language.v1.AnnotateTextResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='sentences', full_name='google.cloud.language.v1.AnnotateTextResponse.sentences', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='tokens', full_name='google.cloud.language.v1.AnnotateTextResponse.tokens', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='entities', full_name='google.cloud.language.v1.AnnotateTextResponse.entities', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='document_sentiment', full_name='google.cloud.language.v1.AnnotateTextResponse.document_sentiment', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1.AnnotateTextResponse.language', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5534,
+  serialized_end=5795,
+)
+
+_DOCUMENT.fields_by_name['type'].enum_type = _DOCUMENT_TYPE
+_DOCUMENT_TYPE.containing_type = _DOCUMENT
+_DOCUMENT.oneofs_by_name['source'].fields.append(
+  _DOCUMENT.fields_by_name['content'])
+_DOCUMENT.fields_by_name['content'].containing_oneof = _DOCUMENT.oneofs_by_name['source']
+_DOCUMENT.oneofs_by_name['source'].fields.append(
+  _DOCUMENT.fields_by_name['gcs_content_uri'])
+_DOCUMENT.fields_by_name['gcs_content_uri'].containing_oneof = _DOCUMENT.oneofs_by_name['source']
+_SENTENCE.fields_by_name['text'].message_type = _TEXTSPAN
+_SENTENCE.fields_by_name['sentiment'].message_type = _SENTIMENT
+_ENTITY_METADATAENTRY.containing_type = _ENTITY
+_ENTITY.fields_by_name['type'].enum_type = _ENTITY_TYPE
+_ENTITY.fields_by_name['metadata'].message_type = _ENTITY_METADATAENTRY
+_ENTITY.fields_by_name['mentions'].message_type = _ENTITYMENTION
+_ENTITY_TYPE.containing_type = _ENTITY
+_TOKEN.fields_by_name['text'].message_type = _TEXTSPAN
+_TOKEN.fields_by_name['part_of_speech'].message_type = _PARTOFSPEECH
+_TOKEN.fields_by_name['dependency_edge'].message_type = _DEPENDENCYEDGE
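+# The assignments in this block stitch the standalone descriptors together:
+# each message field is pointed at the message or enum descriptor it refers
+# to, and each nested enum at its containing message. A minimal
+# introspection sketch of the result, assuming the generated module is
+# importable as google.cloud.proto.language.v1.language_service_pb2 (as the
+# __module__ strings below indicate):
+#
+#   from google.cloud.proto.language.v1 import language_service_pb2 as lang_pb2
+#
+#   # After cross-linking, message- and enum-typed fields resolve to their
+#   # target descriptors; scalar fields have neither.
+#   for field in lang_pb2.Token.DESCRIPTOR.fields:
+#       target = field.message_type or field.enum_type
+#       print(field.name, target.full_name if target else 'scalar')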
+_PARTOFSPEECH.fields_by_name['tag'].enum_type = _PARTOFSPEECH_TAG
+_PARTOFSPEECH.fields_by_name['aspect'].enum_type = _PARTOFSPEECH_ASPECT
+_PARTOFSPEECH.fields_by_name['case'].enum_type = _PARTOFSPEECH_CASE
+_PARTOFSPEECH.fields_by_name['form'].enum_type = _PARTOFSPEECH_FORM
+_PARTOFSPEECH.fields_by_name['gender'].enum_type = _PARTOFSPEECH_GENDER
+_PARTOFSPEECH.fields_by_name['mood'].enum_type = _PARTOFSPEECH_MOOD
+_PARTOFSPEECH.fields_by_name['number'].enum_type = _PARTOFSPEECH_NUMBER
+_PARTOFSPEECH.fields_by_name['person'].enum_type = _PARTOFSPEECH_PERSON
+_PARTOFSPEECH.fields_by_name['proper'].enum_type = _PARTOFSPEECH_PROPER
+_PARTOFSPEECH.fields_by_name['reciprocity'].enum_type = _PARTOFSPEECH_RECIPROCITY
+_PARTOFSPEECH.fields_by_name['tense'].enum_type = _PARTOFSPEECH_TENSE
+_PARTOFSPEECH.fields_by_name['voice'].enum_type = _PARTOFSPEECH_VOICE
+_PARTOFSPEECH_TAG.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_ASPECT.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_CASE.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_FORM.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_GENDER.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_MOOD.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_NUMBER.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_PERSON.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_PROPER.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_RECIPROCITY.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_TENSE.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_VOICE.containing_type = _PARTOFSPEECH
+_DEPENDENCYEDGE.fields_by_name['label'].enum_type = _DEPENDENCYEDGE_LABEL
+_DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE
+_ENTITYMENTION.fields_by_name['text'].message_type = _TEXTSPAN
+_ENTITYMENTION.fields_by_name['type'].enum_type = _ENTITYMENTION_TYPE
+_ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION
+_ANALYZESENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZESENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZESENTIMENTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT
+_ANALYZESENTIMENTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE
+_ANALYZEENTITIESREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZEENTITIESREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZEENTITIESRESPONSE.fields_by_name['entities'].message_type = _ENTITY
+_ANALYZESYNTAXREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZESYNTAXREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZESYNTAXRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE
+_ANALYZESYNTAXRESPONSE.fields_by_name['tokens'].message_type = _TOKEN
+_ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST
+_ANNOTATETEXTREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANNOTATETEXTREQUEST.fields_by_name['features'].message_type = _ANNOTATETEXTREQUEST_FEATURES
+_ANNOTATETEXTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANNOTATETEXTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE
+_ANNOTATETEXTRESPONSE.fields_by_name['tokens'].message_type = _TOKEN
+_ANNOTATETEXTRESPONSE.fields_by_name['entities'].message_type = _ENTITY
+_ANNOTATETEXTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT
+DESCRIPTOR.message_types_by_name['Document'] = _DOCUMENT
+DESCRIPTOR.message_types_by_name['Sentence'] = _SENTENCE
+DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY
+DESCRIPTOR.message_types_by_name['Token'] = _TOKEN
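+# With every message registered on the file-level DESCRIPTOR (the block that
+# continues just below), the _reflection.GeneratedProtocolMessageType calls
+# further down can manufacture the concrete message classes. A small usage
+# sketch of the resulting Document class, reusing the assumed import above;
+# PLAIN_TEXT is a value of the nested Document.Type enum declared earlier in
+# this file:
+#
+#   doc = lang_pb2.Document(
+#       type=lang_pb2.Document.PLAIN_TEXT,
+#       content='Hello, world.',
+#       language='en',
+#   )
+#   # 'content' and 'gcs_content_uri' share the 'source' oneof, so setting
+#   # one clears the other.
+#   assert doc.WhichOneof('source') == 'content'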
+DESCRIPTOR.message_types_by_name['Sentiment'] = _SENTIMENT
+DESCRIPTOR.message_types_by_name['PartOfSpeech'] = _PARTOFSPEECH
+DESCRIPTOR.message_types_by_name['DependencyEdge'] = _DEPENDENCYEDGE
+DESCRIPTOR.message_types_by_name['EntityMention'] = _ENTITYMENTION
+DESCRIPTOR.message_types_by_name['TextSpan'] = _TEXTSPAN
+DESCRIPTOR.message_types_by_name['AnalyzeSentimentRequest'] = _ANALYZESENTIMENTREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeSentimentResponse'] = _ANALYZESENTIMENTRESPONSE
+DESCRIPTOR.message_types_by_name['AnalyzeEntitiesRequest'] = _ANALYZEENTITIESREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeEntitiesResponse'] = _ANALYZEENTITIESRESPONSE
+DESCRIPTOR.message_types_by_name['AnalyzeSyntaxRequest'] = _ANALYZESYNTAXREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeSyntaxResponse'] = _ANALYZESYNTAXRESPONSE
+DESCRIPTOR.message_types_by_name['AnnotateTextRequest'] = _ANNOTATETEXTREQUEST
+DESCRIPTOR.message_types_by_name['AnnotateTextResponse'] = _ANNOTATETEXTRESPONSE
+DESCRIPTOR.enum_types_by_name['EncodingType'] = _ENCODINGTYPE
+
+Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict(
+  DESCRIPTOR = _DOCUMENT,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents the input to API methods.
+
+
+  Attributes:
+      type:
+          Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
+          returns an ``INVALID_ARGUMENT`` error.
+      source:
+          The source of the document: a string containing the content or
+          a Google Cloud Storage URI.
+      content:
+          The content of the input in string format.
+      gcs_content_uri:
+          The Google Cloud Storage URI where the file content is
+          located. This URI must be of the form:
+          gs://bucket\_name/object\_name. For more details, see
+          https://cloud.google.com/storage/docs/reference-uris. NOTE:
+          Cloud Storage object versioning is not supported.
+      language:
+          The language of the document (if not specified, the language
+          is automatically detected). Both ISO and BCP-47 language codes
+          are accepted. `Language Support
+          <https://cloud.google.com/natural-language/docs/languages>`__
+          lists currently supported languages for each API method. If
+          the language (either specified by the caller or automatically
+          detected) is not supported by the called API method, an
+          ``INVALID_ARGUMENT`` error is returned.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Document)
+  ))
+_sym_db.RegisterMessage(Document)
+
+Sentence = _reflection.GeneratedProtocolMessageType('Sentence', (_message.Message,), dict(
+  DESCRIPTOR = _SENTENCE,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents a sentence in the input document.
+
+
+  Attributes:
+      text:
+          The sentence text.
+      sentiment:
+          For calls to [AnalyzeSentiment][] or if [AnnotateTextRequest.F
+          eatures.extract\_document\_sentiment][google.cloud.language.v1
+          .AnnotateTextRequest.Features.extract\_document\_sentiment] is
+          set to true, this field will contain the sentiment for the
+          sentence.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Sentence)
+  ))
+_sym_db.RegisterMessage(Sentence)
+
+Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict(
+
+  MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict(
+    DESCRIPTOR = _ENTITY_METADATAENTRY,
+    __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+    # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity.MetadataEntry)
+    ))
+  ,
+  DESCRIPTOR = _ENTITY,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents a phrase in the text that is a known entity, such as a
+  person, an organization, or location. The API associates information,
+  such as salience and mentions, with entities.
+
+
+  Attributes:
+      name:
+          The representative name for the entity.
+      type:
+          The entity type.
+      metadata:
+          Metadata associated with the entity. Currently, Wikipedia
+          URLs and Knowledge Graph MIDs are provided, if available. The
+          associated keys are "wikipedia\_url" and "mid", respectively.
+      salience:
+          The salience score associated with the entity in the [0, 1.0]
+          range. The salience score for an entity provides information
+          about the importance or centrality of that entity to the
+          entire document text. Scores closer to 0 are less salient,
+          while scores closer to 1.0 are highly salient.
+      mentions:
+          The mentions of this entity in the input document. The API
+          currently supports proper noun mentions.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity)
+  ))
+_sym_db.RegisterMessage(Entity)
+_sym_db.RegisterMessage(Entity.MetadataEntry)
+
+Token = _reflection.GeneratedProtocolMessageType('Token', (_message.Message,), dict(
+  DESCRIPTOR = _TOKEN,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents the smallest syntactic building block of the text.
+
+
+  Attributes:
+      text:
+          The token text.
+      part_of_speech:
+          Parts of speech tag for this token.
+      dependency_edge:
+          Dependency tree parse for this token.
+      lemma:
+          `Lemma
+          <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__ of
+          the token.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Token)
+  ))
+_sym_db.RegisterMessage(Token)
+
+Sentiment = _reflection.GeneratedProtocolMessageType('Sentiment', (_message.Message,), dict(
+  DESCRIPTOR = _SENTIMENT,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents the feeling associated with the entire text or entities in
+  the text.
+
+
+  Attributes:
+      magnitude:
+          A non-negative number in the [0, +inf) range, which represents
+          the absolute magnitude of sentiment regardless of score
+          (positive or negative).
+      score:
+          Sentiment score between -1.0 (negative sentiment) and 1.0
+          (positive sentiment).
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Sentiment)
+  ))
+_sym_db.RegisterMessage(Sentiment)
+
+PartOfSpeech = _reflection.GeneratedProtocolMessageType('PartOfSpeech', (_message.Message,), dict(
+  DESCRIPTOR = _PARTOFSPEECH,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents part of speech information for a token. Parts of speech are
+  as defined in
+  http://www.lrec-conf.org/proceedings/lrec2012/pdf/274\_Paper.pdf
+
+
+  Attributes:
+      tag:
+          The part of speech tag.
+      aspect:
+          The grammatical aspect.
+      case:
+          The grammatical case.
+      form:
+          The grammatical form.
+      gender:
+          The grammatical gender.
+      mood:
+          The grammatical mood.
+      number:
+          The grammatical number.
+      person:
+          The grammatical person.
+      proper:
+          The grammatical properness.
+      reciprocity:
+          The grammatical reciprocity.
+      tense:
+          The grammatical tense.
+      voice:
+          The grammatical voice.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.PartOfSpeech)
+  ))
+_sym_db.RegisterMessage(PartOfSpeech)
+
+DependencyEdge = _reflection.GeneratedProtocolMessageType('DependencyEdge', (_message.Message,), dict(
+  DESCRIPTOR = _DEPENDENCYEDGE,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents dependency parse tree information for a token. (For more
+  information on dependency labels, see
+  http://www.aclweb.org/anthology/P13-2017)
+
+
+  Attributes:
+      head_token_index:
+          Represents the head of this token in the dependency tree. This
+          is the index of the token which has an arc going to this
+          token. The index is the position of the token in the array of
+          tokens returned by the API method. If this token is a root
+          token, then the ``head_token_index`` is its own index.
+      label:
+          The parse label for the token.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.DependencyEdge)
+  ))
+_sym_db.RegisterMessage(DependencyEdge)
+
+EntityMention = _reflection.GeneratedProtocolMessageType('EntityMention', (_message.Message,), dict(
+  DESCRIPTOR = _ENTITYMENTION,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents a mention for an entity in the text. Currently, proper noun
+  mentions are supported.
+
+
+  Attributes:
+      text:
+          The mention text.
+      type:
+          The type of the entity mention.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.EntityMention)
+  ))
+_sym_db.RegisterMessage(EntityMention)
+
+TextSpan = _reflection.GeneratedProtocolMessageType('TextSpan', (_message.Message,), dict(
+  DESCRIPTOR = _TEXTSPAN,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """Represents an output piece of text.
+
+
+  Attributes:
+      content:
+          The content of the output text.
+      begin_offset:
+          The API calculates the beginning offset of the content in the
+          original document according to the
+          [EncodingType][google.cloud.language.v1.EncodingType]
+          specified in the API request.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.TextSpan)
+  ))
+_sym_db.RegisterMessage(TextSpan)
+
+AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZESENTIMENTREQUEST,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The sentiment analysis request message.
+
+
+  Attributes:
+      document:
+          Input document.
+      encoding_type:
+          The encoding type used by the API to calculate sentence
+          offsets.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSentimentRequest)
+  ))
+_sym_db.RegisterMessage(AnalyzeSentimentRequest)
+
+AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZESENTIMENTRESPONSE,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The sentiment analysis response message.
+
+
+  Attributes:
+      document_sentiment:
+          The overall sentiment of the input document.
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1.Document.language] field for more details.
+      sentences:
+          The sentiment for all the sentences in the document.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSentimentResponse)
+  ))
+_sym_db.RegisterMessage(AnalyzeSentimentResponse)
+
+AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZEENTITIESREQUEST,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The entity analysis request message.
+
+
+  Attributes:
+      document:
+          Input document.
+      encoding_type:
+          The encoding type used by the API to calculate offsets.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitiesRequest)
+  ))
+_sym_db.RegisterMessage(AnalyzeEntitiesRequest)
+
+AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZEENTITIESRESPONSE,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The entity analysis response message.
+
+
+  Attributes:
+      entities:
+          The recognized entities in the input document.
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1.Document.language] field for more details.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitiesResponse)
+  ))
+_sym_db.RegisterMessage(AnalyzeEntitiesResponse)
+
+AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZESYNTAXREQUEST,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The syntax analysis request message.
+
+
+  Attributes:
+      document:
+          Input document.
+      encoding_type:
+          The encoding type used by the API to calculate offsets.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSyntaxRequest)
+  ))
+_sym_db.RegisterMessage(AnalyzeSyntaxRequest)
+
+AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZESYNTAXRESPONSE,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The syntax analysis response message.
+
+
+  Attributes:
+      sentences:
+          Sentences in the input document.
+      tokens:
+          Tokens, along with their syntactic information, in the input
+          document.
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1.Document.language] field for more details.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSyntaxResponse)
+  ))
+_sym_db.RegisterMessage(AnalyzeSyntaxResponse)
+
+AnnotateTextRequest = _reflection.GeneratedProtocolMessageType('AnnotateTextRequest', (_message.Message,), dict(
+
+  Features = _reflection.GeneratedProtocolMessageType('Features', (_message.Message,), dict(
+    DESCRIPTOR = _ANNOTATETEXTREQUEST_FEATURES,
+    __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+    ,
+    __doc__ = """All available features for sentiment, syntax, and semantic analysis.
+    Setting each one to true will enable that specific analysis for the
+    input.
+    """,
+    # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextRequest.Features)
+    ))
+  ,
+  DESCRIPTOR = _ANNOTATETEXTREQUEST,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The request message for the text annotation API, which can perform
+  multiple analysis types (sentiment, entities, and syntax) in one call.
+
+
+  Attributes:
+      extract_syntax:
+          Extract syntax information.
+      extract_entities:
+          Extract entities.
+      extract_document_sentiment:
+          Extract document-level sentiment.
+      document:
+          Input document.
+      features:
+          The enabled features.
+      encoding_type:
+          The encoding type used by the API to calculate offsets.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextRequest)
+  ))
+_sym_db.RegisterMessage(AnnotateTextRequest)
+_sym_db.RegisterMessage(AnnotateTextRequest.Features)
+
+AnnotateTextResponse = _reflection.GeneratedProtocolMessageType('AnnotateTextResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ANNOTATETEXTRESPONSE,
+  __module__ = 'google.cloud.proto.language.v1.language_service_pb2'
+  ,
+  __doc__ = """The text annotations response message.
+
+
+  Attributes:
+      sentences:
+          Sentences in the input document. Populated if the user enables
+          [AnnotateTextRequest.Features.extract\_syntax][google.cloud.la
+          nguage.v1.AnnotateTextRequest.Features.extract\_syntax].
+      tokens:
+          Tokens, along with their syntactic information, in the input
+          document. Populated if the user enables [AnnotateTextRequest.F
+          eatures.extract\_syntax][google.cloud.language.v1.AnnotateText
+          Request.Features.extract\_syntax].
+      entities:
+          Entities, along with their semantic information, in the input
+          document. Populated if the user enables [AnnotateTextRequest.F
+          eatures.extract\_entities][google.cloud.language.v1.AnnotateTe
+          xtRequest.Features.extract\_entities].
+      document_sentiment:
+          The overall sentiment for the document. Populated if the user
+          enables [AnnotateTextRequest.Features.extract\_document\_senti
+          ment][google.cloud.language.v1.AnnotateTextRequest.Features.ex
+          tract\_document\_sentiment].
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1.Document.language] field for more details.
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextResponse) + )) +_sym_db.RegisterMessage(AnnotateTextResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.cloud.language.v1B\024LanguageServiceProtoP\001Z@google.golang.org/genproto/googleapis/cloud/language/v1;language')) +_ENTITY_METADATAENTRY.has_options = True +_ENTITY_METADATAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=AnalyzeSentimentRequest.SerializeToString, + response_deserializer=AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=AnnotateTextRequest.SerializeToString, + response_deserializer=AnnotateTextResponse.FromString, + ) + + + class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=AnalyzeSentimentRequest.FromString, + response_serializer=AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=AnalyzeEntitiesRequest.FromString, + response_serializer=AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=AnalyzeSyntaxRequest.FromString, + response_serializer=AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=AnnotateTextRequest.FromString, + response_serializer=AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaLanguageServiceServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaLanguageServiceStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the sentiment of the provided text. + """ + raise NotImplementedError() + AnalyzeSentiment.future = None + def AnalyzeEntities(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. 
+ """ + raise NotImplementedError() + AnalyzeEntities.future = None + def AnalyzeSyntax(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + raise NotImplementedError() + AnalyzeSyntax.future = None + def AnnotateText(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + """ + raise NotImplementedError() + AnnotateText.future = None + + + def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextRequest.FromString, + } + response_serializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextResponse.SerializeToString, + } + method_implementations = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): face_utilities.unary_unary_inline(servicer.AnalyzeEntities), + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeSentiment), + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): face_utilities.unary_unary_inline(servicer.AnalyzeSyntax), + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): face_utilities.unary_unary_inline(servicer.AnnotateText), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextRequest.SerializeToString, + } + response_deserializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextResponse.FromString, + } + cardinalities = { + 'AnalyzeEntities': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSentiment': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSyntax': cardinality.Cardinality.UNARY_UNARY, + 'AnnotateText': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.cloud.language.v1.LanguageService', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py b/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py new file mode 100644 index 000000000000..19ab43fae3f0 --- /dev/null +++ b/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py @@ -0,0 +1,104 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.language.v1.language_service_pb2 as google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2 + + +class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextResponse.FromString, + ) + + +class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/google/cloud/proto/language/v1beta2/__init__.py b/language/google/cloud/proto/language/v1beta2/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/language/google/cloud/proto/language/v1beta2/__init__.py @@ -0,0 +1 @@ + diff --git a/language/google/cloud/proto/language/v1beta2/language_service_pb2.py b/language/google/cloud/proto/language/v1beta2/language_service_pb2.py new file mode 100644 index 000000000000..d3e1d150af8d --- /dev/null +++ b/language/google/cloud/proto/language/v1beta2/language_service_pb2.py @@ -0,0 +1,2843 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/language/v1beta2/language_service.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/language/v1beta2/language_service.proto', + package='google.cloud.language.v1beta2', + syntax='proto3', + serialized_pb=_b('\n:google/cloud/proto/language/v1beta2/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"\xd2\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 
\x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\xdd\x07\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 
\x01(\x05\x12\x42\n\x05label\x18\x02 \x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label\"\xec\x06\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x98\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\"\x9e\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 
\x01(\t\"\x97\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x95\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xe8\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.Features\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x82\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\"\x99\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x97\x07\n\x0fLanguageService\x12\xb3\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse\".\x82\xd3\xe4\x93\x02(\"#/v1beta2/documents:analyzeSentiment:\x01*\x12\xaf\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1beta2/documents:analyzeEntities:\x01*\x12\xcb\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse\"4\x82\xd3\xe4\x93\x02.\")/v1beta2/documents:analyzeEntitySentiment:\x01*\x12\xa7\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse\"+\x82\xd3\xe4\x93\x02%\" /v1beta2/documents:analyzeSyntax:\x01*\x12\xa3\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1beta2/documents:annotateText:\x01*B\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + 
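# ---------------------------------------------------------------------------
# Editor's note (illustrative only, not part of the generated patch): with the
# v1beta2 file descriptor registered above, the messages defined below can be
# sent through the generated stubs. A minimal sketch, assuming the companion
# v1beta2 language_service_pb2_grpc module (mirroring the v1 module earlier in
# this diff) and a plaintext test endpoint; 'localhost:50051' is an assumed
# target -- the real service requires a secure channel and credentials.
#
#   import grpc
#   from google.cloud.proto.language.v1beta2 import language_service_pb2
#   from google.cloud.proto.language.v1beta2 import language_service_pb2_grpc
#
#   channel = grpc.insecure_channel('localhost:50051')  # assumed test server
#   stub = language_service_pb2_grpc.LanguageServiceStub(channel)
#   request = language_service_pb2.AnalyzeSentimentRequest(
#       document=language_service_pb2.Document(
#           type=language_service_pb2.Document.PLAIN_TEXT,
#           content='The quick brown fox jumps over the lazy dog.'),
#       encoding_type=language_service_pb2.UTF8)
#   response = stub.AnalyzeSentiment(request)
#   print(response.document_sentiment.score)
# ---------------------------------------------------------------------------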
+_ENCODINGTYPE = _descriptor.EnumDescriptor( + name='EncodingType', + full_name='google.cloud.language.v1beta2.EncodingType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF8', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF16', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF32', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=6539, + serialized_end=6595, +) +_sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) + +EncodingType = enum_type_wrapper.EnumTypeWrapper(_ENCODINGTYPE) +NONE = 0 +UTF8 = 1 +UTF16 = 2 +UTF32 = 3 + + +_DOCUMENT_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1beta2.Document.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLAIN_TEXT', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HTML', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=355, + serialized_end=409, +) +_sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) + +_ENTITY_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1beta2.Entity.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERSON', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATION', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORGANIZATION', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EVENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WORK_OF_ART', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONSUMER_GOOD', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OTHER', index=7, number=7, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=895, + serialized_end=1016, +) +_sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) + +_PARTOFSPEECH_TAG = _descriptor.EnumDescriptor( + name='Tag', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Tag', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADJ', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADV', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOUN', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=7, number=7, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='PRON', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUNCT', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VERB', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='X', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AFFIX', index=13, number=13, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2108, + serialized_end=2249, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) + +_PARTOFSPEECH_ASPECT = _descriptor.EnumDescriptor( + name='Aspect', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Aspect', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ASPECT_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERFECTIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECTIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROGRESSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2251, + serialized_end=2330, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) + +_PARTOFSPEECH_CASE = _descriptor.EnumDescriptor( + name='Case', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Case', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CASE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACCUSATIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVERBIAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GENITIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INSTRUMENTAL', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATIVE', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMINATIVE', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OBLIQUE', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTITIVE', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREPOSITIONAL', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_CASE', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RELATIVE_CASE', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=14, number=14, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2333, + serialized_end=2581, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) + +_PARTOFSPEECH_FORM = _descriptor.EnumDescriptor( + name='Form', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Form', + 
filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FORM_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADNOMIAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXILIARY', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIZER', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FINAL_ENDING', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GERUND', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REALIS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IRREALIS', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SHORT', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LONG', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORDER', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPECIFIC', index=11, number=11, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2584, + serialized_end=2759, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) + +_PARTOFSPEECH_GENDER = _descriptor.EnumDescriptor( + name='Gender', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Gender', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='GENDER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEMININE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MASCULINE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEUTER', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2761, + serialized_end=2830, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) + +_PARTOFSPEECH_MOOD = _descriptor.EnumDescriptor( + name='Mood', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Mood', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MOOD_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_MOOD', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INDICATIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INTERROGATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JUSSIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUBJUNCTIVE', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2832, + serialized_end=2959, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) + +_PARTOFSPEECH_NUMBER = _descriptor.EnumDescriptor( + name='Number', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Number', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NUMBER_UNKNOWN', index=0, number=0, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='SINGULAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLURAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DUAL', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2961, + serialized_end=3025, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) + +_PARTOFSPEECH_PERSON = _descriptor.EnumDescriptor( + name='Person', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Person', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PERSON_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FIRST', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SECOND', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='THIRD', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_PERSON', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3027, + serialized_end=3111, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) + +_PARTOFSPEECH_PROPER = _descriptor.EnumDescriptor( + name='Proper', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Proper', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PROPER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOT_PROPER', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3113, + serialized_end=3169, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) + +_PARTOFSPEECH_RECIPROCITY = _descriptor.EnumDescriptor( + name='Reciprocity', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Reciprocity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RECIPROCITY_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RECIPROCAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NON_RECIPROCAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3171, + serialized_end=3245, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) + +_PARTOFSPEECH_TENSE = _descriptor.EnumDescriptor( + name='Tense', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Tense', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TENSE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_TENSE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FUTURE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PAST', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRESENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECT', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLUPERFECT', index=6, number=6, + options=None, + type=None), + ], + 
containing_type=None, + options=None, + serialized_start=3247, + serialized_end=3362, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) + +_PARTOFSPEECH_VOICE = _descriptor.EnumDescriptor( + name='Voice', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Voice', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VOICE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACTIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAUSATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PASSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3364, + serialized_end=3430, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) + +_DEPENDENCYEDGE_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.cloud.language.v1beta2.DependencyEdge.Label', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ABBREV', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACOMP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVCL', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVMOD', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AMOD', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='APPOS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ATTR', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUX', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXPASS', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CC', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CCOMP', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJ', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJPASS', index=14, number=14, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DEP', index=15, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=16, number=16, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISCOURSE', index=17, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOBJ', index=18, number=18, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EXPL', index=19, number=19, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GOESWITH', index=20, number=20, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IOBJ', index=21, number=21, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MARK', index=22, number=22, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWE', index=23, number=23, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWV', 
index=24, number=24, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=25, number=25, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NN', index=26, number=26, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NPADVMOD', index=27, number=27, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJ', index=28, number=28, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJPASS', index=29, number=29, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=30, number=30, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMBER', index=31, number=31, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='P', index=32, number=32, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARATAXIS', index=33, number=33, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTMOD', index=34, number=34, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PCOMP', index=35, number=35, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POBJ', index=36, number=36, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSS', index=37, number=37, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSTNEG', index=38, number=38, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECOMP', index=39, number=39, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECONJ', index=40, number=40, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREDET', index=41, number=41, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREF', index=42, number=42, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREP', index=43, number=43, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRONL', index=44, number=44, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=45, number=45, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PS', index=46, number=46, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUANTMOD', index=47, number=47, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMOD', index=48, number=48, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMODREL', index=49, number=49, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RDROP', index=50, number=50, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REF', index=51, number=51, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REMNANT', index=52, number=52, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPARANDUM', index=53, number=53, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ROOT', index=54, number=54, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SNUM', index=55, number=55, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFF', index=56, number=56, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TMOD', index=57, number=57, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOPIC', index=58, number=58, + options=None, + type=None), + 
+    _descriptor.EnumValueDescriptor(
+      name='VMOD', index=59, number=59,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='VOCATIVE', index=60, number=60,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='XCOMP', index=61, number=61,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SUFFIX', index=62, number=62,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TITLE', index=63, number=63,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ADVPHMOD', index=64, number=64,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='AUXCAUS', index=65, number=65,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='AUXVV', index=66, number=66,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DTMOD', index=67, number=67,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FOREIGN', index=68, number=68,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KW', index=69, number=69,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LIST', index=70, number=70,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NOMC', index=71, number=71,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NOMCSUBJ', index=72, number=72,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NOMCSUBJPASS', index=73, number=73,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NUMC', index=74, number=74,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COP', index=75, number=75,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DISLOCATED', index=76, number=76,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=3546,
+  serialized_end=4422,
+)
+_sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL)
+
+_ENTITYMENTION_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='google.cloud.language.v1beta2.EntityMention.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_UNKNOWN', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PROPER', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COMMON', index=2, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=4623,
+  serialized_end=4671,
+)
+_sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE)
+
+
+_DOCUMENT = _descriptor.Descriptor(
+  name='Document',
+  full_name='google.cloud.language.v1beta2.Document',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='google.cloud.language.v1beta2.Document.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='content', full_name='google.cloud.language.v1beta2.Document.content', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='gcs_content_uri', full_name='google.cloud.language.v1beta2.Document.gcs_content_uri', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1beta2.Document.language', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _DOCUMENT_TYPE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='source', full_name='google.cloud.language.v1beta2.Document.source',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=219,
+  serialized_end=419,
+)
+
+
+_SENTENCE = _descriptor.Descriptor(
+  name='Sentence',
+  full_name='google.cloud.language.v1beta2.Sentence',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='text', full_name='google.cloud.language.v1beta2.Sentence.text', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sentiment', full_name='google.cloud.language.v1beta2.Sentence.sentiment', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=421,
+  serialized_end=547,
+)
+
+
+_ENTITY_METADATAENTRY = _descriptor.Descriptor(
+  name='MetadataEntry',
+  full_name='google.cloud.language.v1beta2.Entity.MetadataEntry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='google.cloud.language.v1beta2.Entity.MetadataEntry.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='google.cloud.language.v1beta2.Entity.MetadataEntry.value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=846,
+  serialized_end=893,
+)
+
+_ENTITY = _descriptor.Descriptor(
+  name='Entity',
+  full_name='google.cloud.language.v1beta2.Entity',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.cloud.language.v1beta2.Entity.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='google.cloud.language.v1beta2.Entity.type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='google.cloud.language.v1beta2.Entity.metadata', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='salience', full_name='google.cloud.language.v1beta2.Entity.salience', index=3,
+      number=4, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='mentions', full_name='google.cloud.language.v1beta2.Entity.mentions', index=4,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sentiment', full_name='google.cloud.language.v1beta2.Entity.sentiment', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ENTITY_METADATAENTRY, ],
+  enum_types=[
+    _ENTITY_TYPE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=550,
+  serialized_end=1016,
+)
+
+
+_TOKEN = _descriptor.Descriptor(
+  name='Token',
+  full_name='google.cloud.language.v1beta2.Token',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='text', full_name='google.cloud.language.v1beta2.Token.text', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='part_of_speech', full_name='google.cloud.language.v1beta2.Token.part_of_speech', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='dependency_edge', full_name='google.cloud.language.v1beta2.Token.dependency_edge', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='lemma', full_name='google.cloud.language.v1beta2.Token.lemma', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1019,
+  serialized_end=1237,
+)
+
+
+_SENTIMENT = _descriptor.Descriptor(
+  name='Sentiment',
+  full_name='google.cloud.language.v1beta2.Sentiment',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='magnitude', full_name='google.cloud.language.v1beta2.Sentiment.magnitude', index=0,
+      number=2, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='score', full_name='google.cloud.language.v1beta2.Sentiment.score', index=1,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1239,
+  serialized_end=1284,
+)
+
+
+_PARTOFSPEECH = _descriptor.Descriptor(
+  name='PartOfSpeech',
+  full_name='google.cloud.language.v1beta2.PartOfSpeech',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='tag', full_name='google.cloud.language.v1beta2.PartOfSpeech.tag', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='aspect', full_name='google.cloud.language.v1beta2.PartOfSpeech.aspect', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='case', full_name='google.cloud.language.v1beta2.PartOfSpeech.case', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='form', full_name='google.cloud.language.v1beta2.PartOfSpeech.form', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='gender', full_name='google.cloud.language.v1beta2.PartOfSpeech.gender', index=4,
+      number=5, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='mood', full_name='google.cloud.language.v1beta2.PartOfSpeech.mood', index=5,
+      number=6, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='number', full_name='google.cloud.language.v1beta2.PartOfSpeech.number', index=6,
+      number=7, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='person', full_name='google.cloud.language.v1beta2.PartOfSpeech.person', index=7,
+      number=8, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='proper', full_name='google.cloud.language.v1beta2.PartOfSpeech.proper', index=8,
+      number=9, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='reciprocity', full_name='google.cloud.language.v1beta2.PartOfSpeech.reciprocity', index=9,
+      number=10, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='tense', full_name='google.cloud.language.v1beta2.PartOfSpeech.tense', index=10,
+      number=11, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='voice', full_name='google.cloud.language.v1beta2.PartOfSpeech.voice', index=11,
+      number=12, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _PARTOFSPEECH_TAG,
+    _PARTOFSPEECH_ASPECT,
+    _PARTOFSPEECH_CASE,
+    _PARTOFSPEECH_FORM,
+    _PARTOFSPEECH_GENDER,
+    _PARTOFSPEECH_MOOD,
+    _PARTOFSPEECH_NUMBER,
+    _PARTOFSPEECH_PERSON,
+    _PARTOFSPEECH_PROPER,
+    _PARTOFSPEECH_RECIPROCITY,
+    _PARTOFSPEECH_TENSE,
+    _PARTOFSPEECH_VOICE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1287,
+  serialized_end=3430,
+)
+
+
+_DEPENDENCYEDGE = _descriptor.Descriptor(
+  name='DependencyEdge',
+  full_name='google.cloud.language.v1beta2.DependencyEdge',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='head_token_index', full_name='google.cloud.language.v1beta2.DependencyEdge.head_token_index', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='label', full_name='google.cloud.language.v1beta2.DependencyEdge.label', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _DEPENDENCYEDGE_LABEL,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3433,
+  serialized_end=4422,
+)
+
+
+_ENTITYMENTION = _descriptor.Descriptor(
+  name='EntityMention',
+  full_name='google.cloud.language.v1beta2.EntityMention',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='text', full_name='google.cloud.language.v1beta2.EntityMention.text', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='google.cloud.language.v1beta2.EntityMention.type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sentiment', full_name='google.cloud.language.v1beta2.EntityMention.sentiment', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _ENTITYMENTION_TYPE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4425,
+  serialized_end=4671,
+)
+
+
+_TEXTSPAN = _descriptor.Descriptor(
+  name='TextSpan',
+  full_name='google.cloud.language.v1beta2.TextSpan',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='content', full_name='google.cloud.language.v1beta2.TextSpan.content', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='begin_offset', full_name='google.cloud.language.v1beta2.TextSpan.begin_offset', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4673,
+  serialized_end=4722,
+)
+
+
+_ANALYZESENTIMENTREQUEST = _descriptor.Descriptor(
+  name='AnalyzeSentimentRequest',
+  full_name='google.cloud.language.v1beta2.AnalyzeSentimentRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='document', full_name='google.cloud.language.v1beta2.AnalyzeSentimentRequest.document', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeSentimentRequest.encoding_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4725,
+  serialized_end=4877,
+)
+
+
+_ANALYZESENTIMENTRESPONSE = _descriptor.Descriptor(
+  name='AnalyzeSentimentResponse',
+  full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='document_sentiment', full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse.document_sentiment', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse.language', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sentences', full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse.sentences', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4880,
+  serialized_end=5054,
+)
+
+
+_ANALYZEENTITYSENTIMENTREQUEST = _descriptor.Descriptor(
+  name='AnalyzeEntitySentimentRequest',
+  full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='document', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest.document', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest.encoding_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5057,
+  serialized_end=5215,
+)
+
+
+_ANALYZEENTITYSENTIMENTRESPONSE = _descriptor.Descriptor(
+  name='AnalyzeEntitySentimentResponse',
+  full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entities', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse.entities', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse.language', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5217,
+  serialized_end=5324,
+)
+
+
+_ANALYZEENTITIESREQUEST = _descriptor.Descriptor(
+  name='AnalyzeEntitiesRequest',
+  full_name='google.cloud.language.v1beta2.AnalyzeEntitiesRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='document', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesRequest.document', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesRequest.encoding_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5327,
+  serialized_end=5478,
+)
+
+
+_ANALYZEENTITIESRESPONSE = _descriptor.Descriptor(
+  name='AnalyzeEntitiesResponse',
+  full_name='google.cloud.language.v1beta2.AnalyzeEntitiesResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entities', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesResponse.entities', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesResponse.language', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5480,
+  serialized_end=5580,
+)
+
+
+_ANALYZESYNTAXREQUEST = _descriptor.Descriptor(
+  name='AnalyzeSyntaxRequest',
+  full_name='google.cloud.language.v1beta2.AnalyzeSyntaxRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='document', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxRequest.document', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxRequest.encoding_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5583,
+  serialized_end=5732,
+)
+
+
+_ANALYZESYNTAXRESPONSE = _descriptor.Descriptor(
+  name='AnalyzeSyntaxResponse',
+  full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='sentences', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse.sentences', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='tokens', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse.tokens', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse.language', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5735,
+  serialized_end=5890,
+)
+
+
+_ANNOTATETEXTREQUEST_FEATURES = _descriptor.Descriptor(
+  name='Features',
+  full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='extract_syntax', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extract_entities', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extract_document_sentiment', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extract_entity_sentiment', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=6123,
+  serialized_end=6253,
+)
+
+_ANNOTATETEXTREQUEST = _descriptor.Descriptor(
+  name='AnnotateTextRequest',
+  full_name='google.cloud.language.v1beta2.AnnotateTextRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='document', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.document', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='features', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.features', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='encoding_type', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.encoding_type', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ANNOTATETEXTREQUEST_FEATURES, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=5893,
+  serialized_end=6253,
+)
+
+
+_ANNOTATETEXTRESPONSE = _descriptor.Descriptor(
+  name='AnnotateTextResponse',
+  full_name='google.cloud.language.v1beta2.AnnotateTextResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='sentences', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.sentences', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='tokens', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.tokens', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='entities', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.entities', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='document_sentiment', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.document_sentiment', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='language', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.language', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=6256,
+  serialized_end=6537,
+)
+
+_DOCUMENT.fields_by_name['type'].enum_type = _DOCUMENT_TYPE
+_DOCUMENT_TYPE.containing_type = _DOCUMENT
+_DOCUMENT.oneofs_by_name['source'].fields.append(
+  _DOCUMENT.fields_by_name['content'])
+_DOCUMENT.fields_by_name['content'].containing_oneof = _DOCUMENT.oneofs_by_name['source']
+_DOCUMENT.oneofs_by_name['source'].fields.append(
+  _DOCUMENT.fields_by_name['gcs_content_uri'])
+_DOCUMENT.fields_by_name['gcs_content_uri'].containing_oneof = _DOCUMENT.oneofs_by_name['source']
+_SENTENCE.fields_by_name['text'].message_type = _TEXTSPAN
+_SENTENCE.fields_by_name['sentiment'].message_type = _SENTIMENT
+_ENTITY_METADATAENTRY.containing_type = _ENTITY
+_ENTITY.fields_by_name['type'].enum_type = _ENTITY_TYPE
+_ENTITY.fields_by_name['metadata'].message_type = _ENTITY_METADATAENTRY
+_ENTITY.fields_by_name['mentions'].message_type = _ENTITYMENTION
+_ENTITY.fields_by_name['sentiment'].message_type = _SENTIMENT
+_ENTITY_TYPE.containing_type = _ENTITY
+_TOKEN.fields_by_name['text'].message_type = _TEXTSPAN
+_TOKEN.fields_by_name['part_of_speech'].message_type = _PARTOFSPEECH
+_TOKEN.fields_by_name['dependency_edge'].message_type = _DEPENDENCYEDGE
+_PARTOFSPEECH.fields_by_name['tag'].enum_type = _PARTOFSPEECH_TAG
+_PARTOFSPEECH.fields_by_name['aspect'].enum_type = _PARTOFSPEECH_ASPECT
+_PARTOFSPEECH.fields_by_name['case'].enum_type = _PARTOFSPEECH_CASE
+_PARTOFSPEECH.fields_by_name['form'].enum_type = _PARTOFSPEECH_FORM
+_PARTOFSPEECH.fields_by_name['gender'].enum_type = _PARTOFSPEECH_GENDER
+_PARTOFSPEECH.fields_by_name['mood'].enum_type = _PARTOFSPEECH_MOOD
+_PARTOFSPEECH.fields_by_name['number'].enum_type = _PARTOFSPEECH_NUMBER
+_PARTOFSPEECH.fields_by_name['person'].enum_type = _PARTOFSPEECH_PERSON
+_PARTOFSPEECH.fields_by_name['proper'].enum_type = _PARTOFSPEECH_PROPER
+_PARTOFSPEECH.fields_by_name['reciprocity'].enum_type = _PARTOFSPEECH_RECIPROCITY
+_PARTOFSPEECH.fields_by_name['tense'].enum_type = _PARTOFSPEECH_TENSE
+_PARTOFSPEECH.fields_by_name['voice'].enum_type = _PARTOFSPEECH_VOICE
+_PARTOFSPEECH_TAG.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_ASPECT.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_CASE.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_FORM.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_GENDER.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_MOOD.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_NUMBER.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_PERSON.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_PROPER.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_RECIPROCITY.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_TENSE.containing_type = _PARTOFSPEECH
+_PARTOFSPEECH_VOICE.containing_type = _PARTOFSPEECH
+_DEPENDENCYEDGE.fields_by_name['label'].enum_type = _DEPENDENCYEDGE_LABEL
+_DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE
+_ENTITYMENTION.fields_by_name['text'].message_type = _TEXTSPAN
+_ENTITYMENTION.fields_by_name['type'].enum_type = _ENTITYMENTION_TYPE
+_ENTITYMENTION.fields_by_name['sentiment'].message_type = _SENTIMENT
+_ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION
+_ANALYZESENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZESENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZESENTIMENTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT
+_ANALYZESENTIMENTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE
+_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZEENTITYSENTIMENTRESPONSE.fields_by_name['entities'].message_type = _ENTITY
+_ANALYZEENTITIESREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZEENTITIESREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZEENTITIESRESPONSE.fields_by_name['entities'].message_type = _ENTITY
+_ANALYZESYNTAXREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANALYZESYNTAXREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANALYZESYNTAXRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE
+_ANALYZESYNTAXRESPONSE.fields_by_name['tokens'].message_type = _TOKEN
+_ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST
+_ANNOTATETEXTREQUEST.fields_by_name['document'].message_type = _DOCUMENT
+_ANNOTATETEXTREQUEST.fields_by_name['features'].message_type = _ANNOTATETEXTREQUEST_FEATURES
+_ANNOTATETEXTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE
+_ANNOTATETEXTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE
+_ANNOTATETEXTRESPONSE.fields_by_name['tokens'].message_type = _TOKEN
+_ANNOTATETEXTRESPONSE.fields_by_name['entities'].message_type = _ENTITY
+_ANNOTATETEXTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT
+DESCRIPTOR.message_types_by_name['Document'] = _DOCUMENT
+DESCRIPTOR.message_types_by_name['Sentence'] = _SENTENCE
+DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY
+DESCRIPTOR.message_types_by_name['Token'] = _TOKEN
+DESCRIPTOR.message_types_by_name['Sentiment'] = _SENTIMENT
+DESCRIPTOR.message_types_by_name['PartOfSpeech'] = _PARTOFSPEECH
+DESCRIPTOR.message_types_by_name['DependencyEdge'] = _DEPENDENCYEDGE
+DESCRIPTOR.message_types_by_name['EntityMention'] = _ENTITYMENTION
+DESCRIPTOR.message_types_by_name['TextSpan'] = _TEXTSPAN
+DESCRIPTOR.message_types_by_name['AnalyzeSentimentRequest'] = _ANALYZESENTIMENTREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeSentimentResponse'] = _ANALYZESENTIMENTRESPONSE
+DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentRequest'] = _ANALYZEENTITYSENTIMENTREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentResponse'] = _ANALYZEENTITYSENTIMENTRESPONSE
+DESCRIPTOR.message_types_by_name['AnalyzeEntitiesRequest'] = _ANALYZEENTITIESREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeEntitiesResponse'] = _ANALYZEENTITIESRESPONSE
+DESCRIPTOR.message_types_by_name['AnalyzeSyntaxRequest'] = _ANALYZESYNTAXREQUEST
+DESCRIPTOR.message_types_by_name['AnalyzeSyntaxResponse'] = _ANALYZESYNTAXRESPONSE
+DESCRIPTOR.message_types_by_name['AnnotateTextRequest'] = _ANNOTATETEXTREQUEST
+DESCRIPTOR.message_types_by_name['AnnotateTextResponse'] = _ANNOTATETEXTRESPONSE
+DESCRIPTOR.enum_types_by_name['EncodingType'] = _ENCODINGTYPE
+
+Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict(
+  DESCRIPTOR = _DOCUMENT,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """Represents the input to API methods.
+
+
+  Attributes:
+      type:
+          Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
+          returns an ``INVALID_ARGUMENT`` error.
+      source:
+          The source of the document: a string containing the content or
+          a Google Cloud Storage URI.
+      content:
+          The content of the input in string format.
+      gcs_content_uri:
+          The Google Cloud Storage URI where the file content is
+          located. This URI must be of the form:
+          gs://bucket\_name/object\_name. For more details, see
+          https://cloud.google.com/storage/docs/reference-uris. NOTE:
+          Cloud Storage object versioning is not supported.
+      language:
+          The language of the document (if not specified, the language
+          is automatically detected). Both ISO and BCP-47 language codes
+          are accepted. `Language Support
+          <https://cloud.google.com/natural-language/docs/languages>`__
+          lists currently supported languages for each API method. If
+          the language (either specified by the caller or automatically
+          detected) is not supported by the called API method, an
+          ``INVALID_ARGUMENT`` error is returned.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Document)
+  ))
+_sym_db.RegisterMessage(Document)
+
+Sentence = _reflection.GeneratedProtocolMessageType('Sentence', (_message.Message,), dict(
+  DESCRIPTOR = _SENTENCE,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """Represents a sentence in the input document.
+
+
+  Attributes:
+      text:
+          The sentence text.
+      sentiment:
+          For calls to [AnalyzeSentiment][] or if [AnnotateTextRequest.F
+          eatures.extract\_document\_sentiment][google.cloud.language.v1
+          beta2.AnnotateTextRequest.Features.extract\_document\_sentimen
+          t] is set to true, this field will contain the sentiment for
+          the sentence.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentence)
+  ))
+_sym_db.RegisterMessage(Sentence)
+
+Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict(
+
+  MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict(
+    DESCRIPTOR = _ENTITY_METADATAENTRY,
+    __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+    # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity.MetadataEntry)
+    ))
+  ,
+  DESCRIPTOR = _ENTITY,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """Represents a phrase in the text that is a known entity, such as a
+  person, an organization, or location. The API associates information,
+  such as salience and mentions, with entities.
+
+
+  Attributes:
+      name:
+          The representative name for the entity.
+      type:
+          The entity type.
+      metadata:
+          Metadata associated with the entity.  Currently, Wikipedia
+          URLs and Knowledge Graph MIDs are provided, if available. The
+          associated keys are "wikipedia\_url" and "mid", respectively.
+      salience:
+          The salience score associated with the entity in the [0, 1.0]
+          range.  The salience score for an entity provides information
+          about the importance or centrality of that entity to the
+          entire document text. Scores closer to 0 are less salient,
+          while scores closer to 1.0 are highly salient.
+      mentions:
+          The mentions of this entity in the input document. The API
+          currently supports proper noun mentions.
+      sentiment:
+          For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq
+          uest.Features.extract\_entity\_sentiment][google.cloud.languag
+          e.v1beta2.AnnotateTextRequest.Features.extract\_entity\_sentim
+          ent] is set to true, this field will contain the aggregate
+          sentiment expressed for this entity in the provided document.
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity) + )) +_sym_db.RegisterMessage(Entity) +_sym_db.RegisterMessage(Entity.MetadataEntry) + +Token = _reflection.GeneratedProtocolMessageType('Token', (_message.Message,), dict( + DESCRIPTOR = _TOKEN, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents the smallest syntactic building block of the text. + + + Attributes: + text: + The token text. + part_of_speech: + Parts of speech tag for this token. + dependency_edge: + Dependency tree parse for this token. + lemma: + `Lemma + <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__ of + the token. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Token) + )) +_sym_db.RegisterMessage(Token) + +Sentiment = _reflection.GeneratedProtocolMessageType('Sentiment', (_message.Message,), dict( + DESCRIPTOR = _SENTIMENT, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents the feeling associated with the entire text or entities in + the text. + + + Attributes: + magnitude: + A non-negative number in the [0, +inf) range, which represents + the absolute magnitude of sentiment regardless of score + (positive or negative). + score: + Sentiment score between -1.0 (negative sentiment) and 1.0 + (positive sentiment). + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentiment) + )) +_sym_db.RegisterMessage(Sentiment) + +PartOfSpeech = _reflection.GeneratedProtocolMessageType('PartOfSpeech', (_message.Message,), dict( + DESCRIPTOR = _PARTOFSPEECH, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents part of speech information for a token. + + + Attributes: + tag: + The part of speech tag. + aspect: + The grammatical aspect. + case: + The grammatical case. + form: + The grammatical form. + gender: + The grammatical gender. + mood: + The grammatical mood. + number: + The grammatical number. + person: + The grammatical person. + proper: + The grammatical properness. + reciprocity: + The grammatical reciprocity. + tense: + The grammatical tense. + voice: + The grammatical voice. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.PartOfSpeech) + )) +_sym_db.RegisterMessage(PartOfSpeech) + +DependencyEdge = _reflection.GeneratedProtocolMessageType('DependencyEdge', (_message.Message,), dict( + DESCRIPTOR = _DEPENDENCYEDGE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents dependency parse tree information for a token. + + + Attributes: + head_token_index: + Represents the head of this token in the dependency tree. This + is the index of the token which has an arc going to this + token. The index is the position of the token in the array of + tokens returned by the API method. If this token is a root + token, then the ``head_token_index`` is its own index. + label: + The parse label for the token. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.DependencyEdge) + )) +_sym_db.RegisterMessage(DependencyEdge) + +EntityMention = _reflection.GeneratedProtocolMessageType('EntityMention', (_message.Message,), dict( + DESCRIPTOR = _ENTITYMENTION, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents a mention for an entity in the text. Currently, proper noun + mentions are supported. 
+
+
+  Attributes:
+      text:
+          The mention text.
+      type:
+          The type of the entity mention.
+      sentiment:
+          For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq
+          uest.Features.extract\_entity\_sentiment][google.cloud.languag
+          e.v1beta2.AnnotateTextRequest.Features.extract\_entity\_sentim
+          ent] is set to true, this field will contain the sentiment
+          expressed for this mention of the entity in the provided
+          document.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.EntityMention)
+  ))
+_sym_db.RegisterMessage(EntityMention)
+
+TextSpan = _reflection.GeneratedProtocolMessageType('TextSpan', (_message.Message,), dict(
+  DESCRIPTOR = _TEXTSPAN,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """Represents an output piece of text.
+
+
+  Attributes:
+      content:
+          The content of the output text.
+      begin_offset:
+          The API calculates the beginning offset of the content in the
+          original document according to the
+          [EncodingType][google.cloud.language.v1beta2.EncodingType]
+          specified in the API request.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.TextSpan)
+  ))
+_sym_db.RegisterMessage(TextSpan)
+
+AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZESENTIMENTREQUEST,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """The sentiment analysis request message.
+
+
+  Attributes:
+      document:
+          Input document.
+      encoding_type:
+          The encoding type used by the API to calculate sentence
+          offsets for the sentence sentiment.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSentimentRequest)
+  ))
+_sym_db.RegisterMessage(AnalyzeSentimentRequest)
+
+AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZESENTIMENTRESPONSE,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """The sentiment analysis response message.
+
+
+  Attributes:
+      document_sentiment:
+          The overall sentiment of the input document.
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1beta2.Document.language] field for more
+          details.
+      sentences:
+          The sentiment for all the sentences in the document.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSentimentResponse)
+  ))
+_sym_db.RegisterMessage(AnalyzeSentimentResponse)
+
+AnalyzeEntitySentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ANALYZEENTITYSENTIMENTREQUEST,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """The entity-level sentiment analysis request message.
+
+
+  Attributes:
+      document:
+          Input document.
+      encoding_type:
+          The encoding type used by the API to calculate offsets.
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest) + )) +_sym_db.RegisterMessage(AnalyzeEntitySentimentRequest) + +AnalyzeEntitySentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITYSENTIMENTRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity-level sentiment analysis response message. + + + Attributes: + entities: + The recognized entities in the input document with associated + sentiments. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse) + )) +_sym_db.RegisterMessage(AnalyzeEntitySentimentResponse) + +AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITIESREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitiesRequest) + )) +_sym_db.RegisterMessage(AnalyzeEntitiesRequest) + +AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITIESRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity analysis response message. + + + Attributes: + entities: + The recognized entities in the input document. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitiesResponse) + )) +_sym_db.RegisterMessage(AnalyzeEntitiesResponse) + +AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESYNTAXREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The syntax analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSyntaxRequest) + )) +_sym_db.RegisterMessage(AnalyzeSyntaxRequest) + +AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESYNTAXRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The syntax analysis response message. + + + Attributes: + sentences: + Sentences in the input document. + tokens: + Tokens, along with their syntactic information, in the input + document. 
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1beta2.Document.language] field for more
+          details.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSyntaxResponse)
+  ))
+_sym_db.RegisterMessage(AnalyzeSyntaxResponse)
+
+AnnotateTextRequest = _reflection.GeneratedProtocolMessageType('AnnotateTextRequest', (_message.Message,), dict(
+
+  Features = _reflection.GeneratedProtocolMessageType('Features', (_message.Message,), dict(
+    DESCRIPTOR = _ANNOTATETEXTREQUEST_FEATURES,
+    __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+    ,
+    __doc__ = """All available features for sentiment, syntax, and semantic analysis.
+    Setting each one to true will enable that specific analysis for the
+    input.
+    """,
+    # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest.Features)
+    ))
+  ,
+  DESCRIPTOR = _ANNOTATETEXTREQUEST,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """The request message for the text annotation API, which can perform
+  multiple analysis types (sentiment, entities, and syntax) in one call.
+
+
+  Attributes:
+      extract_syntax:
+          Extract syntax information.
+      extract_entities:
+          Extract entities.
+      extract_document_sentiment:
+          Extract document-level sentiment.
+      extract_entity_sentiment:
+          Extract entities and their associated sentiment.
+      document:
+          Input document.
+      features:
+          The enabled features.
+      encoding_type:
+          The encoding type used by the API to calculate offsets.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest)
+  ))
+_sym_db.RegisterMessage(AnnotateTextRequest)
+_sym_db.RegisterMessage(AnnotateTextRequest.Features)
+
+AnnotateTextResponse = _reflection.GeneratedProtocolMessageType('AnnotateTextResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ANNOTATETEXTRESPONSE,
+  __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2'
+  ,
+  __doc__ = """The text annotations response message.
+
+
+  Attributes:
+      sentences:
+          Sentences in the input document. Populated if the user enables
+          [AnnotateTextRequest.Features.extract\_syntax][google.cloud.la
+          nguage.v1beta2.AnnotateTextRequest.Features.extract\_syntax].
+      tokens:
+          Tokens, along with their syntactic information, in the input
+          document. Populated if the user enables [AnnotateTextRequest.F
+          eatures.extract\_syntax][google.cloud.language.v1beta2.Annotat
+          eTextRequest.Features.extract\_syntax].
+      entities:
+          Entities, along with their semantic information, in the input
+          document. Populated if the user enables [AnnotateTextRequest.F
+          eatures.extract\_entities][google.cloud.language.v1beta2.Annot
+          ateTextRequest.Features.extract\_entities].
+      document_sentiment:
+          The overall sentiment for the document. Populated if the user
+          enables [AnnotateTextRequest.Features.extract\_document\_senti
+          ment][google.cloud.language.v1beta2.AnnotateTextRequest.Featur
+          es.extract\_document\_sentiment].
+      language:
+          The language of the text, which will be the same as the
+          language specified in the request or, if not specified, the
+          automatically-detected language. See [Document.language][googl
+          e.cloud.language.v1beta2.Document.language] field for more
+          details.
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextResponse) + )) +_sym_db.RegisterMessage(AnnotateTextResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n!com.google.cloud.language.v1beta2B\024LanguageServiceProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;language')) +_ENTITY_METADATAENTRY.has_options = True +_ENTITY_METADATAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=AnalyzeSentimentRequest.SerializeToString, + response_deserializer=AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=AnalyzeEntitySentimentResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=AnnotateTextRequest.SerializeToString, + response_deserializer=AnnotateTextResponse.FromString, + ) + + + class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. 
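+
+    (Generated base class: the default handlers below only report
+    ``UNIMPLEMENTED``; a concrete service subclasses this servicer and
+    overrides each method.)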
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=AnalyzeSentimentRequest.FromString, + response_serializer=AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=AnalyzeEntitiesRequest.FromString, + response_serializer=AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=AnalyzeEntitySentimentRequest.FromString, + response_serializer=AnalyzeEntitySentimentResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=AnalyzeSyntaxRequest.FromString, + response_serializer=AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=AnnotateTextRequest.FromString, + response_serializer=AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1beta2.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaLanguageServiceServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaLanguageServiceStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the sentiment of the provided text. + """ + raise NotImplementedError() + AnalyzeSentiment.future = None + def AnalyzeEntities(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + raise NotImplementedError() + AnalyzeEntities.future = None + def AnalyzeEntitySentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + raise NotImplementedError() + AnalyzeEntitySentiment.future = None + def AnalyzeSyntax(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + raise NotImplementedError() + AnalyzeSyntax.future = None + def AnnotateText(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + raise NotImplementedError() + AnnotateText.future = None + + + def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextRequest.FromString, + } + response_serializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextResponse.SerializeToString, + } + method_implementations = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): face_utilities.unary_unary_inline(servicer.AnalyzeEntities), + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeEntitySentiment), + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeSentiment), + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): face_utilities.unary_unary_inline(servicer.AnalyzeSyntax), + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): face_utilities.unary_unary_inline(servicer.AnnotateText), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextRequest.SerializeToString, + } + response_deserializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextResponse.FromString, + } + cardinalities = { + 'AnalyzeEntities': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeEntitySentiment': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSentiment': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSyntax': cardinality.Cardinality.UNARY_UNARY, + 'AnnotateText': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.cloud.language.v1beta2.LanguageService', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py b/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py new file mode 100644 index 000000000000..264d6d43f468 --- /dev/null +++ b/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py @@ -0,0 +1,122 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.language.v1beta2.language_service_pb2 as google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2 + + +class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
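+
+        A minimal usage sketch (the insecure local target is an
+        illustrative assumption; real clients use a secure channel to
+        the API endpoint):
+
+            channel = grpc.insecure_channel('localhost:50051')
+            stub = LanguageServiceStub(channel)
+            # stub.AnalyzeSentiment(AnalyzeSentimentRequest(...)) then
+            # issues the unary-unary RPC.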
+ """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextResponse.FromString, + ) + + +class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. 
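+
+        (Wiring sketch: a concrete subclass that overrides these
+        handlers is attached to a ``grpc.Server`` with
+        ``add_LanguageServiceServicer_to_server``, defined below; the
+        subclass itself is user-supplied.)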
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1beta2.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/nox.py b/language/nox.py index 2b4f372786ed..43212b1e8f38 100644 --- a/language/nox.py +++ b/language/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -49,11 +52,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. 
    session.interpreter = 'python{}'.format(python_version)
 
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'sys-' + python_version
+
     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
     session.install('mock', 'pytest', *LOCAL_DEPS)
@@ -66,21 +72,32 @@ def system_tests(session, python_version):
 
 @nox.session
 def lint(session):
-    """Run flake8.
+    """Run linters.
 
-    Returns a failure if flake8 finds linting errors or sufficiently
+    Returns a failure if the linters find linting errors or sufficiently
     serious code quality issues.
     """
     session.interpreter = 'python3.6'
-    session.install('flake8', *LOCAL_DEPS)
+    session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS)
     session.install('.')
     session.run('flake8', 'google/cloud/language')
+    session.run(
+        'gcp-devrel-py-tools', 'run-pylint',
+        '--config', 'pylint.config.py',
+        '--library-filesets', 'google',
+        '--test-filesets', 'tests',
+        # Temporarily allow this to fail.
+        success_codes=range(0, 100))
 
 
 @nox.session
 def lint_setup_py(session):
     """Verify that setup.py is valid (including RST check)."""
     session.interpreter = 'python3.6'
+
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'setup'
+
     session.install('docutils', 'Pygments')
     session.run(
         'python', 'setup.py', 'check', '--restructuredtext', '--strict')
diff --git a/language/pylint.config.py b/language/pylint.config.py
new file mode 100644
index 000000000000..b618319b8b61
--- /dev/null
+++ b/language/pylint.config.py
@@ -0,0 +1,25 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module is used to configure gcp-devrel-py-tools run-pylint."""
+
+# Library configuration
+
+# library_additions = {}
+# library_replacements = {}
+
+# Test configuration
+
+# test_additions = copy.deepcopy(library_additions)
+# test_replacements = copy.deepcopy(library_replacements)
diff --git a/language/setup.py b/language/setup.py
index 6e10d9abbd44..16ee4d5603ad 100644
--- a/language/setup.py
+++ b/language/setup.py
@@ -27,7 +27,7 @@
 # consolidate.
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,19 +51,29 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', ] +EXTRAS_REQUIRE = { + ':python_version<"3.4"': ['enum34'], +} setup( name='google-cloud-language', - version='0.24.1', + version='0.25.0', description='Python Client for Google Cloud Natural Language', long_description=README, namespace_packages=[ 'google', 'google.cloud', + 'google.cloud.gapic', + 'google.cloud.gapic.language', + 'google.cloud.proto', + 'google.cloud.proto.language', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, + extras_require=EXTRAS_REQUIRE, **SETUP_BASE ) diff --git a/language/tests/gapic/v1/language_service_smoke_test.py b/language/tests/gapic/v1/language_service_smoke_test.py new file mode 100644 index 000000000000..67839505c670 --- /dev/null +++ b/language/tests/gapic/v1/language_service_smoke_test.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import unittest + +from google.cloud.gapic.language.v1 import enums +from google.cloud.gapic.language.v1 import language_service_client +from google.cloud.proto.language.v1 import language_service_pb2 + + +class LanguageServiceSmokeTest(unittest.TestCase): + def test_analyze_sentiment(self): + + client = language_service_client.LanguageServiceClient() + content = 'Hello, world!' + type_ = enums.Document.Type.PLAIN_TEXT + document = language_service_pb2.Document(content=content, type=type_) + response = client.analyze_sentiment(document) diff --git a/language/tests/gapic/v1/test_language_service_client_v1.py b/language/tests/gapic/v1/test_language_service_client_v1.py new file mode 100644 index 000000000000..a0b1931727ce --- /dev/null +++ b/language/tests/gapic/v1/test_language_service_client_v1.py @@ -0,0 +1,232 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
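+
+# These tests patch google.gax.config.create_stub so that no real gRPC
+# channel is created; the mock stub records each call, letting every test
+# assert on the exact request message the client constructed.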
+"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors + +from google.cloud.gapic.language.v1 import enums +from google.cloud.gapic.language.v1 import language_service_client +from google.cloud.proto.language.v1 import language_service_pb2 + + +class CustomException(Exception): + pass + + +class TestLanguageServiceClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSentimentResponse( + language=language) + grpc_stub.AnalyzeSentiment.return_value = expected_response + + response = client.analyze_sentiment(document) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSentiment.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSentiment.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSentimentRequest( + document=document) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock exception response + grpc_stub.AnalyzeSentiment.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_sentiment, document) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeEntitiesResponse( + language=language) + grpc_stub.AnalyzeEntities.return_value = expected_response + + response = client.analyze_entities(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeEntities.assert_called_once() + args, kwargs = grpc_stub.AnalyzeEntities.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + 
grpc_stub.AnalyzeEntities.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_entities, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSyntaxResponse( + language=language) + grpc_stub.AnalyzeSyntax.return_value = expected_response + + response = client.analyze_syntax(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSyntax.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSyntax.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnalyzeSyntax.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_syntax, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnnotateTextResponse( + language=language) + grpc_stub.AnnotateText.return_value = expected_response + + response = client.annotate_text(document, features, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnnotateText.assert_called_once() + args, kwargs = grpc_stub.AnnotateText.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock exception response 
+ grpc_stub.AnnotateText.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.annotate_text, document, + features, encoding_type) diff --git a/language/tests/gapic/v1beta2/language_service_smoke_test.py b/language/tests/gapic/v1beta2/language_service_smoke_test.py new file mode 100644 index 000000000000..d94531f88f75 --- /dev/null +++ b/language/tests/gapic/v1beta2/language_service_smoke_test.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import unittest + +from google.cloud.gapic.language.v1beta2 import enums +from google.cloud.gapic.language.v1beta2 import language_service_client +from google.cloud.proto.language.v1beta2 import language_service_pb2 + + +class LanguageServiceSmokeTest(unittest.TestCase): + def test_analyze_sentiment(self): + + client = language_service_client.LanguageServiceClient() + content = 'Hello, world!' + type_ = enums.Document.Type.PLAIN_TEXT + document = language_service_pb2.Document(content=content, type=type_) + response = client.analyze_sentiment(document) diff --git a/language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py b/language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py new file mode 100644 index 000000000000..fea1c572d4ce --- /dev/null +++ b/language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py @@ -0,0 +1,283 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
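+
+# Same mocking pattern as the v1 tests: google.gax.config.create_stub is
+# patched so no network traffic occurs. This suite additionally covers
+# analyze_entity_sentiment, which is exposed by the v1beta2 client.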
+"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors + +from google.cloud.gapic.language.v1beta2 import enums +from google.cloud.gapic.language.v1beta2 import language_service_client +from google.cloud.proto.language.v1beta2 import language_service_pb2 + + +class CustomException(Exception): + pass + + +class TestLanguageServiceClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSentimentResponse( + language=language) + grpc_stub.AnalyzeSentiment.return_value = expected_response + + response = client.analyze_sentiment(document) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSentiment.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSentiment.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSentimentRequest( + document=document) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock exception response + grpc_stub.AnalyzeSentiment.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_sentiment, document) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeEntitiesResponse( + language=language) + grpc_stub.AnalyzeEntities.return_value = expected_response + + response = client.analyze_entities(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeEntities.assert_called_once() + args, kwargs = grpc_stub.AnalyzeEntities.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + 
grpc_stub.AnalyzeEntities.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_entities, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entity_sentiment(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeEntitySentimentResponse( + language=language) + grpc_stub.AnalyzeEntitySentiment.return_value = expected_response + + response = client.analyze_entity_sentiment(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeEntitySentiment.assert_called_once() + args, kwargs = grpc_stub.AnalyzeEntitySentiment.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeEntitySentimentRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entity_sentiment_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnalyzeEntitySentiment.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_entity_sentiment, + document, encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSyntaxResponse( + language=language) + grpc_stub.AnalyzeSyntax.return_value = expected_response + + response = client.analyze_syntax(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSyntax.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSyntax.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnalyzeSyntax.side_effect = 
CustomException() + + self.assertRaises(errors.GaxError, client.analyze_syntax, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnnotateTextResponse( + language=language) + grpc_stub.AnnotateText.return_value = expected_response + + response = client.annotate_text(document, features, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnnotateText.assert_called_once() + args, kwargs = grpc_stub.AnnotateText.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnnotateText.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.annotate_text, document, + features, encoding_type) diff --git a/logging/MANIFEST.in b/logging/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/logging/MANIFEST.in +++ b/logging/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/logging/README.rst b/logging/README.rst index 5df19dd1f79a..8cf274e4e4a1 100644 --- a/logging/README.rst +++ b/logging/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Logging - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -54,9 +54,9 @@ Example of fetching entries: See the ``google-cloud-python`` API `logging documentation`_ to learn how to connect to Stackdriver Logging using this Client Library. -.. 
_logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html +.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg - :target: https://pypi.python.org/pypi/google-cloud-logging + :target: https://pypi.org/project/google-cloud-logging/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg - :target: https://pypi.python.org/pypi/google-cloud-logging + :target: https://pypi.org/project/google-cloud-logging/ diff --git a/logging/google/cloud/logging/handlers/_helpers.py b/logging/google/cloud/logging/handlers/_helpers.py index 1ebb064ed228..864f0e53617e 100644 --- a/logging/google/cloud/logging/handlers/_helpers.py +++ b/logging/google/cloud/logging/handlers/_helpers.py @@ -22,11 +22,21 @@ except ImportError: # pragma: NO COVER flask = None +try: + import webapp2 +except (ImportError, SyntaxError): # pragma: NO COVER + # If you try to import webapp2 under python3, you'll get a syntax + # error (since it hasn't been ported yet). We just pretend it + # doesn't exist. This is unlikely to hit in real life but does + # in the tests. + webapp2 = None + from google.cloud.logging.handlers.middleware.request import ( _get_django_request) -_FLASK_TRACE_HEADER = 'X_CLOUD_TRACE_CONTEXT' _DJANGO_TRACE_HEADER = 'HTTP_X_CLOUD_TRACE_CONTEXT' +_FLASK_TRACE_HEADER = 'X_CLOUD_TRACE_CONTEXT' +_WEBAPP2_TRACE_HEADER = 'X-CLOUD-TRACE-CONTEXT' def format_stackdriver_json(record, message): @@ -54,7 +64,7 @@ def get_trace_id_from_flask(): """Get trace_id from flask request headers. :rtype: str - :return: Trace_id in HTTP request headers. + :returns: TraceID in HTTP request headers. """ if flask is None or not flask.request: return None @@ -69,11 +79,38 @@ def get_trace_id_from_flask(): return trace_id +def get_trace_id_from_webapp2(): + """Get trace_id from webapp2 request headers. + + :rtype: str + :returns: TraceID in HTTP request headers. + """ + if webapp2 is None: + return None + + try: + # get_request() succeeds if we're in the middle of a webapp2 + # request, or raises an assertion error otherwise: + # "Request global variable is not set". + req = webapp2.get_request() + except AssertionError: + return None + + header = req.headers.get(_WEBAPP2_TRACE_HEADER) + + if header is None: + return None + + trace_id = header.split('/', 1)[0] + + return trace_id + + def get_trace_id_from_django(): """Get trace_id from django request headers. :rtype: str - :return: Trace_id in HTTP request headers. + :returns: TraceID in HTTP request headers. """ request = _get_django_request() @@ -93,9 +130,11 @@ def get_trace_id(): """Helper to get trace_id from web application request header. :rtype: str - :returns: Trace_id in HTTP request headers. + :returns: TraceID in HTTP request headers. """ - checkers = (get_trace_id_from_django, get_trace_id_from_flask) + checkers = (get_trace_id_from_django, + get_trace_id_from_flask, + get_trace_id_from_webapp2) for checker in checkers: trace_id = checker() diff --git a/logging/nox.py b/logging/nox.py index fbbbec1958c1..ce8d1c0afbce 100644 --- a/logging/nox.py +++ b/logging/nox.py @@ -30,10 +30,13 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. 
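+    # (One virtualenv per session, named for the session type and the
+    # Python version it runs.)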
+    session.virtualenv_dirname = 'unit-' + python_version
+
     # Install all test dependencies, then install this package in-place.
     session.install(
         'mock', 'pytest', 'pytest-cov',
-        'flask', 'django', *LOCAL_DEPS)
+        'flask', 'webapp2', 'webob', 'django', *LOCAL_DEPS)
     session.install('-e', '.')
 
     # Run py.test against the unit tests.
@@ -52,11 +55,14 @@
 
     # Sanity check: Only run system tests if the environment variable is set.
     if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
-        return
+        session.skip('Credentials must be set via environment variable.')
 
     # Run the system tests against latest Python 2 and Python 3 only.
     session.interpreter = 'python{}'.format(python_version)
 
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'sys-' + python_version
+
     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
     session.install('mock', 'pytest', *LOCAL_DEPS)
@@ -70,21 +76,32 @@
 
 @nox.session
 def lint(session):
-    """Run flake8.
+    """Run linters.
 
-    Returns a failure if flake8 finds linting errors or sufficiently
+    Returns a failure if the linters find linting errors or sufficiently
     serious code quality issues.
     """
     session.interpreter = 'python3.6'
-    session.install('flake8', *LOCAL_DEPS)
+    session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS)
     session.install('.')
     session.run('flake8', 'google/cloud/logging')
+    session.run(
+        'gcp-devrel-py-tools', 'run-pylint',
+        '--config', 'pylint.config.py',
+        '--library-filesets', 'google',
+        '--test-filesets', 'tests',
+        # Temporarily allow this to fail.
+        success_codes=range(0, 100))
 
 
 @nox.session
 def lint_setup_py(session):
     """Verify that setup.py is valid (including RST check)."""
     session.interpreter = 'python3.6'
+
+    # Set the virtualenv dirname.
+    session.virtualenv_dirname = 'setup'
+
     session.install('docutils', 'Pygments')
     session.run(
         'python', 'setup.py', 'check', '--restructuredtext', '--strict')
diff --git a/logging/pylint.config.py b/logging/pylint.config.py
new file mode 100644
index 000000000000..b618319b8b61
--- /dev/null
+++ b/logging/pylint.config.py
@@ -0,0 +1,25 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module is used to configure gcp-devrel-py-tools run-pylint."""
+
+# Library configuration
+
+# library_additions = {}
+# library_replacements = {}
+
+# Test configuration
+
+# test_additions = copy.deepcopy(library_additions)
+# test_replacements = copy.deepcopy(library_replacements)
diff --git a/logging/setup.py b/logging/setup.py
index e3f8334cd5bb..82dc4f1fcf8a 100644
--- a/logging/setup.py
+++ b/logging/setup.py
@@ -27,7 +27,7 @@
 # consolidate.
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', ] setup( name='google-cloud-logging', - version='1.0.0', + version='1.1.0', description='Python Client for Stackdriver Logging', long_description=README, namespace_packages=[ diff --git a/logging/tests/unit/handlers/test__helpers.py b/logging/tests/unit/handlers/test__helpers.py index 0731c825d32c..f721881eea11 100644 --- a/logging/tests/unit/handlers/test__helpers.py +++ b/logging/tests/unit/handlers/test__helpers.py @@ -12,9 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import unittest import mock +import six + +try: + from webapp2 import RequestHandler +except SyntaxError: + # webapp2 has not been ported to python3, so it will give a syntax + # error if we try. We'll just skip the webapp2 tests in that case. + RequestHandler = object class Test_get_trace_id_from_flask(unittest.TestCase): @@ -37,11 +46,9 @@ def index(): return app - def setUp(self): - self.app = self.create_app() - def test_no_context_header(self): - with self.app.test_request_context( + app = self.create_app() + with app.test_request_context( path='/', headers={}): trace_id = self._call_fut() @@ -53,7 +60,8 @@ def test_valid_context_header(self): expected_trace_id = 'testtraceidflask' flask_trace_id = expected_trace_id + '/testspanid' - context = self.app.test_request_context( + app = self.create_app() + context = app.test_request_context( path='/', headers={flask_trace_header: flask_trace_id}) @@ -63,6 +71,54 @@ def test_valid_context_header(self): self.assertEqual(trace_id, expected_trace_id) +class _GetTraceId(RequestHandler): + def get(self): + from google.cloud.logging.handlers import _helpers + + trace_id = _helpers.get_trace_id_from_webapp2() + self.response.content_type = 'application/json' + self.response.out.write(json.dumps(trace_id)) + + + +@unittest.skipIf(six.PY3, 'webapp2 is Python 2 only') +class Test_get_trace_id_from_webapp2(unittest.TestCase): + + @staticmethod + def create_app(): + import webapp2 + + app = webapp2.WSGIApplication([ + ('/', _GetTraceId), + ]) + + return app + + def test_no_context_header(self): + import webob + + req = webob.BaseRequest.blank('/') + response = req.get_response(self.create_app()) + trace_id = json.loads(response.body) + + self.assertEqual(None, trace_id) + + def test_valid_context_header(self): + import webob + + webapp2_trace_header = 'X-Cloud-Trace-Context' + expected_trace_id = 'testtraceidwebapp2' + webapp2_trace_id = expected_trace_id + '/testspanid' + + req = webob.BaseRequest.blank( + '/', + headers={webapp2_trace_header: webapp2_trace_id}) + response = req.get_response(self.create_app()) + trace_id = json.loads(response.body) + + self.assertEqual(trace_id, expected_trace_id) + + class Test_get_trace_id_from_django(unittest.TestCase): @staticmethod diff --git a/monitoring/MANIFEST.in b/monitoring/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/monitoring/MANIFEST.in +++ b/monitoring/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto 
-recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/monitoring/README.rst b/monitoring/README.rst index 6c4889fb3925..f5a8bb8ecb77 100644 --- a/monitoring/README.rst +++ b/monitoring/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Monitoring - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -64,9 +64,9 @@ Display CPU utilization across your GCE instances during the last five minutes: See the ``google-cloud-python`` API `monitoring documentation`_ to learn how to connect to Stackdriver Monitoring using this Client Library. -.. _monitoring documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring-usage.html +.. _monitoring documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-monitoring.svg - :target: https://pypi.python.org/pypi/google-cloud-monitoring + :target: https://pypi.org/project/google-cloud-monitoring/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-monitoring.svg - :target: https://pypi.python.org/pypi/google-cloud-monitoring + :target: https://pypi.org/project/google-cloud-monitoring/ diff --git a/monitoring/nox.py b/monitoring/nox.py index ad69f1c610b3..b11a0cd5b693 100644 --- a/monitoring/nox.py +++ b/monitoring/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -49,11 +52,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -66,21 +72,32 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/monitoring') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/monitoring/pylint.config.py b/monitoring/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/monitoring/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/monitoring/setup.py b/monitoring/setup.py index 98555e2a7bf3..bfb8ca155d82 100644 --- a/monitoring/setup.py +++ b/monitoring/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,12 +51,12 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', ] setup( name='google-cloud-monitoring', - version='0.24.0', + version='0.25.0', description='Python Client for Stackdriver Monitoring', long_description=README, namespace_packages=[ diff --git a/nox.py b/nox.py index facf277e5006..25db4c616c4f 100644 --- a/nox.py +++ b/nox.py @@ -24,6 +24,9 @@ def docs(session): # Build docs against the latest version of Python, because we can. session.interpreter = 'python3.6' + # Set the virtualenv dirname. + session.virtualenv_dirname = 'docs' + # Install Sphinx and also all of the google-cloud-* packages. session.chdir(os.path.realpath(os.path.dirname(__file__))) session.install('Sphinx >= 1.6.2', 'sphinx_rtd_theme') @@ -43,6 +46,10 @@ def docs(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. 
+ session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/pubsub/MANIFEST.in b/pubsub/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/pubsub/MANIFEST.in +++ b/pubsub/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/pubsub/README.rst b/pubsub/README.rst index 6bf9d77ee82e..bf116676a440 100644 --- a/pubsub/README.rst +++ b/pubsub/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Pub / Sub - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -45,7 +45,7 @@ independently written applications. See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect to Cloud Pub/Sub using this Client Library. -.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html +.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html To get started with this API, you'll need to create @@ -61,6 +61,6 @@ To get started with this API, you'll need to create attr1='value1', attr2='value2') .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg - :target: https://pypi.python.org/pypi/google-cloud-pubsub + :target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg - :target: https://pypi.python.org/pypi/google-cloud-pubsub + :target: https://pypi.org/project/google-cloud-pubsub/ diff --git a/pubsub/nox.py b/pubsub/nox.py index 209ed41f9bfc..4bcecafe66b4 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -49,11 +52,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. 
+ session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -66,21 +72,32 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/pubsub') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/pubsub/pylint.config.py b/pubsub/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/pubsub/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/pubsub/setup.py b/pubsub/setup.py index b1b1375ed870..856a59824a60 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -27,7 +27,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.1, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-pubsub', - version='0.25.0', + version='0.26.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ diff --git a/resource_manager/MANIFEST.in b/resource_manager/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/resource_manager/MANIFEST.in +++ b/resource_manager/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/resource_manager/README.rst b/resource_manager/README.rst index 6d7482690273..fe6864580ed5 100644 --- a/resource_manager/README.rst +++ b/resource_manager/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Resource Manager - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager-api.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager/api.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -42,9 +42,9 @@ Google Cloud Platform. See the ``google-cloud-python`` API `Resource Manager documentation`_ to learn how to manage projects using this Client Library. -.. _Resource Manager documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager-api.html +.. _Resource Manager documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager/api.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-resource-manager.svg - :target: https://pypi.python.org/pypi/google-cloud-resource-manager + :target: https://pypi.org/project/google-cloud-resource-manager/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-resource-manager.svg - :target: https://pypi.python.org/pypi/google-cloud-resource-manager + :target: https://pypi.org/project/google-cloud-resource-manager/ diff --git a/resource_manager/nox.py b/resource_manager/nox.py index 8a5cb2f6fa5e..448ff93fc292 100644 --- a/resource_manager/nox.py +++ b/resource_manager/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. 
session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -44,21 +47,32 @@ def unit_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/resource_manager') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/resource_manager/pylint.config.py b/resource_manager/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/resource_manager/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/resource_manager/setup.py b/resource_manager/setup.py index 64abeb02b62e..dd295b2973a5 100644 --- a/resource_manager/setup.py +++ b/resource_manager/setup.py @@ -27,7 +27,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,12 +51,12 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', ] setup( name='google-cloud-resource-manager', - version='0.24.0', + version='0.25.0', description='Python Client for Google Cloud Resource Manager', long_description=README, namespace_packages=[ diff --git a/runtimeconfig/MANIFEST.in b/runtimeconfig/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/runtimeconfig/MANIFEST.in +++ b/runtimeconfig/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/runtimeconfig/README.rst b/runtimeconfig/README.rst index dcf71476fd41..d061bea3eda2 100644 --- a/runtimeconfig/README.rst +++ b/runtimeconfig/README.rst @@ -30,7 +30,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -48,6 +48,6 @@ See the ``google-cloud-python`` API runtimeconfig `Documentation`_ to learn how to interact with Cloud RuntimeConfig using this Client Library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-runtimeconfig.svg - :target: https://pypi.python.org/pypi/google-cloud-runtimeconfig + :target: https://pypi.org/project/google-cloud-runtimeconfig/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-runtimeconfig.svg - :target: https://pypi.python.org/pypi/google-cloud-runtimeconfig + :target: https://pypi.org/project/google-cloud-runtimeconfig/ diff --git a/runtimeconfig/nox.py b/runtimeconfig/nox.py index 3d228059a8d0..2b48a111e2a1 100644 --- a/runtimeconfig/nox.py +++ b/runtimeconfig/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -44,21 +47,32 @@ def unit_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/runtimeconfig') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. 
+ success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/runtimeconfig/pylint.config.py b/runtimeconfig/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/runtimeconfig/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/runtimeconfig/setup.py b/runtimeconfig/setup.py index ac54e459d8bd..f874d07f29bc 100644 --- a/runtimeconfig/setup.py +++ b/runtimeconfig/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,12 +51,12 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', ] setup( name='google-cloud-runtimeconfig', - version='0.24.0', + version='0.25.0', description='Python Client for Google Cloud RuntimeConfig', long_description=README, namespace_packages=[ diff --git a/setup.cfg b/setup.cfg index 2a9acf13daa9..79874b747939 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,5 @@ [bdist_wheel] universal = 1 + +[tool:pytest] +addopts = --tb=native diff --git a/setup.py b/setup.py index 9570fbb5ef4e..ca6491ec530e 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -50,29 +50,29 @@ REQUIREMENTS = [ - 'google-cloud-bigquery >= 0.24.0, < 0.25dev', - 'google-cloud-bigtable >= 0.24.0, < 0.25dev', - 'google-cloud-core >= 0.24.1, < 0.25dev', - 'google-cloud-datastore >= 1.0.0, < 2.0dev', - 'google-cloud-dns >= 0.24.0, < 0.25dev', - 'google-cloud-error-reporting >= 0.24.2, < 0.25dev', - 'google-cloud-language >= 0.24.0, < 0.25dev', - 'google-cloud-logging >= 1.0.0, < 2.0dev', - 'google-cloud-monitoring >= 0.24.0, < 0.25dev', - 'google-cloud-pubsub >= 0.25.0, < 0.26dev', - 'google-cloud-resource-manager >= 0.24.0, < 0.25dev', - 'google-cloud-spanner >= 0.24.1, < 0.25dev', - 'google-cloud-speech >= 0.25.0, < 0.26dev', - 'google-cloud-storage >= 1.1.0, < 2.0dev', - 'google-cloud-translate >= 0.24.0, < 0.25dev', + 'google-cloud-bigquery >= 0.25.0, < 0.26dev', + 'google-cloud-bigtable >= 0.25.0, < 0.26dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-cloud-datastore >= 1.1.0, < 1.2dev', + 'google-cloud-dns >= 0.25.0, < 0.26dev', + 'google-cloud-error-reporting >= 0.25.1, < 0.26dev', + 'google-cloud-language >= 0.25.0, < 0.26dev', + 'google-cloud-logging >= 1.1.0, < 1.2dev', + 'google-cloud-monitoring >= 0.25.0, < 0.26dev', + 'google-cloud-pubsub >= 0.26.0, < 0.27dev', + 'google-cloud-resource-manager >= 0.25.0, < 0.26dev', + 'google-cloud-runtimeconfig >= 0.25.0, < 0.26dev', + 'google-cloud-spanner >= 0.25.0, < 0.26dev', + 'google-cloud-speech >= 0.27.0, < 0.28dev', + 'google-cloud-storage >= 1.2.0, < 1.3dev', + 'google-cloud-translate >= 0.25.0, < 0.26dev', 'google-cloud-videointelligence >= 0.25.0, < 0.26dev', 'google-cloud-vision >= 0.25.0, < 0.26dev', - 'google-cloud-runtimeconfig >= 0.24.0, < 0.25dev', ] setup( name='google-cloud', - version='0.25.0', + version='0.26.2', description='API Client library for Google Cloud', long_description=README, install_requires=REQUIREMENTS, diff --git a/spanner/MANIFEST.in b/spanner/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/spanner/MANIFEST.in +++ b/spanner/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/spanner/README.rst b/spanner/README.rst index fedabfb50fef..1580c27a71a0 100644 --- a/spanner/README.rst +++ b/spanner/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Spanner Python idiomatic client for `Cloud Spanner`_. -.. _Cloud Spanner: https://googlecloudplatform.github.io/google-cloud-python/latest/spanner-usage.html +.. 
_Cloud Spanner: https://googlecloudplatform.github.io/google-cloud-python/latest/spanner/usage.html Quick Start diff --git a/spanner/google/cloud/spanner/client.py b/spanner/google/cloud/spanner/client.py index 875238aed2bc..6274d28d9e18 100644 --- a/spanner/google/cloud/spanner/client.py +++ b/spanner/google/cloud/spanner/client.py @@ -24,7 +24,6 @@ :class:`~google.cloud.spanner.database.Database` """ -import google.auth.credentials from google.gax import INITIAL_PAGE # pylint: disable=line-too-long from google.cloud.gapic.spanner_admin_database.v1.database_admin_client import ( # noqa @@ -34,9 +33,7 @@ # pylint: enable=line-too-long from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.client import _ClientFactoryMixin -from google.cloud.client import _ClientProjectMixin -from google.cloud.credentials import get_credentials +from google.cloud.client import ClientWithProject from google.cloud.iterator import GAXIterator from google.cloud.spanner import __version__ from google.cloud.spanner._helpers import _options_with_prefix @@ -73,13 +70,13 @@ def from_pb(cls, config_pb): return cls(config_pb.name, config_pb.display_name) -class Client(_ClientFactoryMixin, _ClientProjectMixin): +class Client(ClientWithProject): """Client for interacting with Cloud Spanner API. .. note:: Since the Cloud Spanner API requires the gRPC transport, no - ``http`` argument is accepted by this class. + ``_http`` argument is accepted by this class. :type project: :class:`str` or :func:`unicode <unicode>` :param project: (Optional) The ID of the project which owns the @@ -104,21 +101,16 @@ class Client(_ClientFactoryMixin, _ClientProjectMixin): _database_admin_api = None _SET_PROJECT = True # Used by from_service_account_json() + SCOPE = (SPANNER_ADMIN_SCOPE,) + """The scopes required for Google Cloud Spanner.""" + def __init__(self, project=None, credentials=None, user_agent=DEFAULT_USER_AGENT): - - _ClientProjectMixin.__init__(self, project=project) - if credentials is None: - credentials = get_credentials() - - scopes = [ - SPANNER_ADMIN_SCOPE, - ] - - credentials = google.auth.credentials.with_scopes_if_required( - credentials, scopes) - - self._credentials = credentials + # NOTE: This API has no use for the _http argument, but sending it + # will have no impact since the _http() @property only lazily + # creates a working HTTP object. + super(Client, self).__init__( + project=project, credentials=credentials, _http=None) self.user_agent = user_agent @property @@ -181,19 +173,20 @@ def copy(self): :rtype: :class:`.Client` :returns: A copy of the current client. """ - credentials = self._credentials - copied_creds = credentials.create_scoped(credentials.scopes) return self.__class__( - self.project, - copied_creds, - self.user_agent, + project=self.project, + credentials=self._credentials, + user_agent=self.user_agent, ) def list_instance_configs(self, page_size=None, page_token=None): """List available instance configurations for the client's project. - See - https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.InstanceAdmin.ListInstanceConfigs + .. _RPC docs: https://cloud.google.com/spanner/docs/reference/rpc/\ + google.spanner.admin.instance.v1#google.spanner.admin.\ + instance.v1.InstanceAdmin.ListInstanceConfigs + + See `RPC docs`_. :type page_size: int :param page_size: (Optional) Maximum number of results to return. 
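The ``Client`` changes above are the heart of this hunk: instead of mixing in ``_ClientFactoryMixin`` / ``_ClientProjectMixin`` and scoping credentials by hand, ``Client`` now derives from ``google.cloud.client.ClientWithProject``, which resolves the project and credentials and applies the class-level ``SCOPE`` tuple. A minimal sketch of how the refactored client is used, assuming application-default credentials are configured (the project ID below is hypothetical):

    from google.cloud.spanner.client import Client

    # Project and credentials are resolved by ClientWithProject;
    # the SPANNER_ADMIN_SCOPE listed in Client.SCOPE is applied for us.
    client = Client(project='my-project')  # hypothetical project ID

    # list_instance_configs() yields InstanceConfig objects built via
    # InstanceConfig.from_pb(), each carrying a name and display name.
    for config in client.list_instance_configs(page_size=10):
        print(config.name, config.display_name)

``copy()`` likewise no longer re-scopes credentials; it hands the existing ``self._credentials`` straight back to the constructor.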
diff --git a/spanner/google/cloud/spanner/database.py b/spanner/google/cloud/spanner/database.py index 12af9ca20edb..8df06812949d 100644 --- a/spanner/google/cloud/spanner/database.py +++ b/spanner/google/cloud/spanner/database.py @@ -18,8 +18,6 @@ from google.gax.errors import GaxError from google.gax.grpc import exc_to_code -from google.cloud.proto.spanner.admin.database.v1 import ( - spanner_database_admin_pb2 as admin_v1_pb2) from google.cloud.gapic.spanner.v1.spanner_client import SpannerClient from grpc import StatusCode import six @@ -27,7 +25,6 @@ # pylint: disable=ungrouped-imports from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -from google.cloud.operation import register_type from google.cloud.spanner import __version__ from google.cloud.spanner._helpers import _options_with_prefix from google.cloud.spanner.batch import Batch @@ -44,10 +41,6 @@ r'databases/(?P<database_id>[a-z][a-z0-9_\-]*[a-z0-9])$' ) -register_type(admin_v1_pb2.Database) -register_type(admin_v1_pb2.CreateDatabaseMetadata) -register_type(admin_v1_pb2.UpdateDatabaseDdlMetadata) - class Database(object): """Representation of a Cloud Spanner Database. @@ -205,7 +198,6 @@ def create(self): )) raise - future.caller_metadata = {'request_type': 'CreateDatabase'} return future def exists(self): @@ -252,7 +244,7 @@ def update_ddl(self, ddl_statements): See https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase - :rtype: :class:`google.cloud.operation.Operation` + :rtype: :class:`google.cloud.future.operation.Operation` :returns: an operation instance """ client = self._instance._client @@ -267,7 +259,6 @@ def update_ddl(self, ddl_statements): raise NotFound(self.name) raise - future.caller_metadata = {'request_type': 'UpdateDatabaseDdl'} return future def drop(self): @@ -389,8 +380,7 @@ def batch(self): """ return BatchCheckout(self) - def snapshot(self, read_timestamp=None, min_read_timestamp=None, - max_staleness=None, exact_staleness=None): + def snapshot(self, **kw): """Return an object which wraps a snapshot. The wrapper *must* be used as a context manager, with the snapshot @@ -399,38 +389,15 @@ def snapshot(self, read_timestamp=None, min_read_timestamp=None, See https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.TransactionOptions.ReadOnly - If no options are passed, reads will use the ``strong`` model, reading - at a timestamp where all previously committed transactions are visible. - - :type read_timestamp: :class:`datetime.datetime` - :param read_timestamp: Execute all reads at the given timestamp. - - :type min_read_timestamp: :class:`datetime.datetime` - :param min_read_timestamp: Execute all reads at a - timestamp >= ``min_read_timestamp``. - - :type max_staleness: :class:`datetime.timedelta` - :param max_staleness: Read data at a - timestamp >= NOW - ``max_staleness`` seconds. - - :type exact_staleness: :class:`datetime.timedelta` - :param exact_staleness: Execute all reads at a timestamp that is - ``exact_staleness`` old. - - :rtype: :class:`~google.cloud.spanner.snapshot.Snapshot` - :returns: a snapshot bound to this session - :raises: :exc:`ValueError` if the session has not yet been created. + :type kw: dict + :param kw: + Passed through to + :class:`~google.cloud.spanner.snapshot.Snapshot` constructor. 
:rtype: :class:`~google.cloud.spanner.database.SnapshotCheckout` :returns: new wrapper """ - return SnapshotCheckout( - self, - read_timestamp=read_timestamp, - min_read_timestamp=min_read_timestamp, - max_staleness=max_staleness, - exact_staleness=exact_staleness, - ) + return SnapshotCheckout(self, **kw) class BatchCheckout(object): @@ -476,40 +443,20 @@ class SnapshotCheckout(object): :type database: :class:`~google.cloud.spannder.database.Database` :param database: database to use - :type read_timestamp: :class:`datetime.datetime` - :param read_timestamp: Execute all reads at the given timestamp. - - :type min_read_timestamp: :class:`datetime.datetime` - :param min_read_timestamp: Execute all reads at a - timestamp >= ``min_read_timestamp``. - - :type max_staleness: :class:`datetime.timedelta` - :param max_staleness: Read data at a - timestamp >= NOW - ``max_staleness`` seconds. - - :type exact_staleness: :class:`datetime.timedelta` - :param exact_staleness: Execute all reads at a timestamp that is - ``exact_staleness`` old. + :type kw: dict + :param kw: + Passed through to + :class:`~google.cloud.spanner.snapshot.Snapshot` constructor. """ - def __init__(self, database, read_timestamp=None, min_read_timestamp=None, - max_staleness=None, exact_staleness=None): + def __init__(self, database, **kw): self._database = database self._session = None - self._read_timestamp = read_timestamp - self._min_read_timestamp = min_read_timestamp - self._max_staleness = max_staleness - self._exact_staleness = exact_staleness + self._kw = kw def __enter__(self): """Begin ``with`` block.""" session = self._session = self._database._pool.get() - return Snapshot( - session, - read_timestamp=self._read_timestamp, - min_read_timestamp=self._min_read_timestamp, - max_staleness=self._max_staleness, - exact_staleness=self._exact_staleness, - ) + return Snapshot(session, **self._kw) def __exit__(self, exc_type, exc_val, exc_tb): """End ``with`` block.""" diff --git a/spanner/google/cloud/spanner/instance.py b/spanner/google/cloud/spanner/instance.py index 711b8c489853..e67a0c31be6c 100644 --- a/spanner/google/cloud/spanner/instance.py +++ b/spanner/google/cloud/spanner/instance.py @@ -28,7 +28,6 @@ from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator -from google.cloud.operation import register_type from google.cloud.spanner._helpers import _options_with_prefix from google.cloud.spanner.database import Database from google.cloud.spanner.pool import BurstyPool @@ -41,10 +40,6 @@ DEFAULT_NODE_COUNT = 1 -register_type(admin_v1_pb2.Instance) -register_type(admin_v1_pb2.CreateInstanceMetadata) -register_type(admin_v1_pb2.UpdateInstanceMetadata) - class Instance(object): """Representation of a Cloud Spanner Instance. @@ -204,7 +199,7 @@ def create(self): before calling :meth:`create`. - :rtype: :class:`google.cloud.operation.Operation` + :rtype: :class:`google.cloud.future.operation.Operation` :returns: an operation instance """ api = self._client.instance_admin_api @@ -228,7 +223,6 @@ def create(self): raise Conflict(self.name) raise - future.caller_metadata = {'request_type': 'CreateInstance'} return future def exists(self): @@ -285,7 +279,7 @@ def update(self): before calling :meth:`update`. 
- :rtype: :class:`google.cloud.operation.Operation` + :rtype: :class:`google.cloud.future.operation.Operation` :returns: an operation instance """ api = self._client.instance_admin_api @@ -309,7 +303,6 @@ def update(self): raise NotFound(self.name) raise - future.caller_metadata = {'request_type': 'UpdateInstance'} return future def delete(self): diff --git a/spanner/google/cloud/spanner/session.py b/spanner/google/cloud/spanner/session.py index 45baffa92d43..19ff60de4e1b 100644 --- a/spanner/google/cloud/spanner/session.py +++ b/spanner/google/cloud/spanner/session.py @@ -139,30 +139,15 @@ def delete(self): raise NotFound(self.name) raise - def snapshot(self, read_timestamp=None, min_read_timestamp=None, - max_staleness=None, exact_staleness=None): + def snapshot(self, **kw): """Create a snapshot to perform a set of reads with shared staleness. See https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.TransactionOptions.ReadOnly - If no options are passed, reads will use the ``strong`` model, reading - at a timestamp where all previously committed transactions are visible. - - :type read_timestamp: :class:`datetime.datetime` - :param read_timestamp: Execute all reads at the given timestamp. - - :type min_read_timestamp: :class:`datetime.datetime` - :param min_read_timestamp: Execute all reads at a - timestamp >= ``min_read_timestamp``. - - :type max_staleness: :class:`datetime.timedelta` - :param max_staleness: Read data at a - timestamp >= NOW - ``max_staleness`` seconds. - - :type exact_staleness: :class:`datetime.timedelta` - :param exact_staleness: Execute all reads at a timestamp that is - ``exact_staleness`` old. + :type kw: dict + :param kw: Passed through to + :class:`~google.cloud.spanner.snapshot.Snapshot` ctor. :rtype: :class:`~google.cloud.spanner.snapshot.Snapshot` :returns: a snapshot bound to this session @@ -171,11 +156,7 @@ def snapshot(self, read_timestamp=None, min_read_timestamp=None, if self._session_id is None: raise ValueError("Session has not been created.") - return Snapshot(self, - read_timestamp=read_timestamp, - min_read_timestamp=min_read_timestamp, - max_staleness=max_staleness, - exact_staleness=exact_staleness) + return Snapshot(self, **kw) def read(self, table, columns, keyset, index='', limit=0, resume_token=b''): @@ -292,7 +273,7 @@ def run_in_transaction(self, func, *args, **kw): txn = self.transaction() else: txn = self._transaction - if txn._id is None: + if txn._transaction_id is None: txn.begin() try: func(txn, *args, **kw) @@ -302,7 +283,6 @@ def run_in_transaction(self, func, *args, **kw): continue except Exception: txn.rollback() - del self._transaction raise try: @@ -312,7 +292,6 @@ def run_in_transaction(self, func, *args, **kw): del self._transaction else: committed = txn.committed - del self._transaction return committed diff --git a/spanner/google/cloud/spanner/snapshot.py b/spanner/google/cloud/spanner/snapshot.py index 05fcba63f322..e0da23f3acd9 100644 --- a/spanner/google/cloud/spanner/snapshot.py +++ b/spanner/google/cloud/spanner/snapshot.py @@ -34,6 +34,10 @@ class _SnapshotBase(_SessionWrapper): :type session: :class:`~google.cloud.spanner.session.Session` :param session: the session used to perform the commit """ + _multi_use = False + _transaction_id = None + _read_request_count = 0 + def _make_txn_selector(self): # pylint: disable=redundant-returns-doc """Helper for :meth:`read` / :meth:`execute_sql`. 
@@ -70,7 +74,15 @@ def read(self, table, columns, keyset, index='', limit=0, :rtype: :class:`~google.cloud.spanner.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. + :raises: ValueError for reuse of single-use snapshots, or if a + transaction ID is pending for multiple-use snapshots. """ + if self._read_request_count > 0: + if not self._multi_use: + raise ValueError("Cannot re-use single-use snapshot.") + if self._transaction_id is None: + raise ValueError("Transaction ID pending.") + database = self._session._database api = database.spanner_api options = _options_with_prefix(database.name) @@ -81,7 +93,12 @@ def read(self, table, columns, keyset, index='', limit=0, transaction=transaction, index=index, limit=limit, resume_token=resume_token, options=options) - return StreamedResultSet(iterator) + self._read_request_count += 1 + + if self._multi_use: + return StreamedResultSet(iterator, source=self) + else: + return StreamedResultSet(iterator) def execute_sql(self, sql, params=None, param_types=None, query_mode=None, resume_token=b''): @@ -109,7 +126,15 @@ def execute_sql(self, sql, params=None, param_types=None, query_mode=None, :rtype: :class:`~google.cloud.spanner.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. + :raises: ValueError for reuse of single-use snapshots, or if a + transaction ID is pending for multiple-use snapshots. """ + if self._read_request_count > 0: + if not self._multi_use: + raise ValueError("Cannot re-use single-use snapshot.") + if self._transaction_id is None: + raise ValueError("Transaction ID pending.") + if params is not None: if param_types is None: raise ValueError( @@ -128,7 +153,12 @@ def execute_sql(self, sql, params=None, param_types=None, query_mode=None, transaction=transaction, params=params_pb, param_types=param_types, query_mode=query_mode, resume_token=resume_token, options=options) - return StreamedResultSet(iterator) + self._read_request_count += 1 + + if self._multi_use: + return StreamedResultSet(iterator, source=self) + else: + return StreamedResultSet(iterator) class Snapshot(_SnapshotBase): @@ -157,9 +187,16 @@ class Snapshot(_SnapshotBase): :type exact_staleness: :class:`datetime.timedelta` :param exact_staleness: Execute all reads at a timestamp that is ``exact_staleness`` old. + + :type multi_use: :class:`bool` + :param multi_use: If true, multiple :meth:`read` / :meth:`execute_sql` + calls can be performed with the snapshot in the + context of a read-only transaction, used to ensure + isolation / consistency. Incompatible with + ``max_staleness`` and ``min_read_timestamp``. 
""" def __init__(self, session, read_timestamp=None, min_read_timestamp=None, - max_staleness=None, exact_staleness=None): + max_staleness=None, exact_staleness=None, multi_use=False): super(Snapshot, self).__init__(session) opts = [ read_timestamp, min_read_timestamp, max_staleness, exact_staleness] @@ -168,14 +205,24 @@ def __init__(self, session, read_timestamp=None, min_read_timestamp=None, if len(flagged) > 1: raise ValueError("Supply zero or one options.") + if multi_use: + if min_read_timestamp is not None or max_staleness is not None: + raise ValueError( + "'multi_use' is incompatible with " + "'min_read_timestamp' / 'max_staleness'") + self._strong = len(flagged) == 0 self._read_timestamp = read_timestamp self._min_read_timestamp = min_read_timestamp self._max_staleness = max_staleness self._exact_staleness = exact_staleness + self._multi_use = multi_use def _make_txn_selector(self): """Helper for :meth:`read`.""" + if self._transaction_id is not None: + return TransactionSelector(id=self._transaction_id) + if self._read_timestamp: key = 'read_timestamp' value = _datetime_to_pb_timestamp(self._read_timestamp) @@ -194,4 +241,34 @@ def _make_txn_selector(self): options = TransactionOptions( read_only=TransactionOptions.ReadOnly(**{key: value})) - return TransactionSelector(single_use=options) + + if self._multi_use: + return TransactionSelector(begin=options) + else: + return TransactionSelector(single_use=options) + + def begin(self): + """Begin a transaction on the database. + + :rtype: bytes + :returns: the ID for the newly-begun transaction. + :raises: ValueError if the transaction is already begun, committed, + or rolled back. + """ + if not self._multi_use: + raise ValueError("Cannot call 'begin' single-use snapshots") + + if self._transaction_id is not None: + raise ValueError("Read-only transaction already begun") + + if self._read_request_count > 0: + raise ValueError("Read-only transaction already pending") + + database = self._session._database + api = database.spanner_api + options = _options_with_prefix(database.name) + txn_selector = self._make_txn_selector() + response = api.begin_transaction( + self._session.name, txn_selector.begin, options=options) + self._transaction_id = response.id + return self._transaction_id diff --git a/spanner/google/cloud/spanner/streamed.py b/spanner/google/cloud/spanner/streamed.py index 19333844b1c1..7aa0ca43156e 100644 --- a/spanner/google/cloud/spanner/streamed.py +++ b/spanner/google/cloud/spanner/streamed.py @@ -32,8 +32,11 @@ class StreamedResultSet(object): Iterator yielding :class:`google.cloud.proto.spanner.v1.result_set_pb2.PartialResultSet` instances. + + :type source: :class:`~google.cloud.spanner.snapshot.Snapshot` + :param source: Snapshot from which the result set was fetched. 
""" - def __init__(self, response_iterator): + def __init__(self, response_iterator, source=None): self._response_iterator = response_iterator self._rows = [] # Fully-processed rows self._counter = 0 # Counter for processed responses @@ -42,6 +45,7 @@ def __init__(self, response_iterator): self._resume_token = None # To resume from last received PRS self._current_row = [] # Accumulated values for incomplete row self._pending_chunk = None # Incomplete value + self._source = source # Source snapshot @property def rows(self): @@ -130,7 +134,11 @@ def consume_next(self): self._resume_token = response.resume_token if self._metadata is None: # first response - self._metadata = response.metadata + metadata = self._metadata = response.metadata + + source = self._source + if source is not None and source._transaction_id is None: + source._transaction_id = metadata.transaction.id if response.HasField('stats'): # last response self._stats = response.stats diff --git a/spanner/google/cloud/spanner/transaction.py b/spanner/google/cloud/spanner/transaction.py index af2140896830..598fb0c30407 100644 --- a/spanner/google/cloud/spanner/transaction.py +++ b/spanner/google/cloud/spanner/transaction.py @@ -27,11 +27,8 @@ class Transaction(_SnapshotBase, _BatchBase): """Implement read-write transaction semantics for a session.""" committed = None """Timestamp at which the transaction was successfully committed.""" - - def __init__(self, session): - super(Transaction, self).__init__(session) - self._id = None - self._rolled_back = False + _rolled_back = False + _multi_use = True def _check_state(self): """Helper for :meth:`commit` et al. @@ -39,7 +36,7 @@ def _check_state(self): :raises: :exc:`ValueError` if the object's state is invalid for making API requests. """ - if self._id is None: + if self._transaction_id is None: raise ValueError("Transaction is not begun") if self.committed is not None: @@ -56,7 +53,7 @@ def _make_txn_selector(self): :returns: a selector configured for read-write transaction semantics. """ self._check_state() - return TransactionSelector(id=self._id) + return TransactionSelector(id=self._transaction_id) def begin(self): """Begin a transaction on the database. @@ -66,7 +63,7 @@ def begin(self): :raises: ValueError if the transaction is already begun, committed, or rolled back. """ - if self._id is not None: + if self._transaction_id is not None: raise ValueError("Transaction already begun") if self.committed is not None: @@ -82,8 +79,8 @@ def begin(self): read_write=TransactionOptions.ReadWrite()) response = api.begin_transaction( self._session.name, txn_options, options=options) - self._id = response.id - return self._id + self._transaction_id = response.id + return self._transaction_id def rollback(self): """Roll back a transaction on the database.""" @@ -91,8 +88,9 @@ def rollback(self): database = self._session._database api = database.spanner_api options = _options_with_prefix(database.name) - api.rollback(self._session.name, self._id, options=options) + api.rollback(self._session.name, self._transaction_id, options=options) self._rolled_back = True + del self._session._transaction def commit(self): """Commit mutations to the database. 
@@ -111,9 +109,10 @@ def commit(self): options = _options_with_prefix(database.name) response = api.commit( self._session.name, self._mutations, - transaction_id=self._id, options=options) + transaction_id=self._transaction_id, options=options) self.committed = _pb_timestamp_to_datetime( response.commit_timestamp) + del self._session._transaction return self.committed def __enter__(self): diff --git a/spanner/nox.py b/spanner/nox.py index fa551267dde1..bdb2b4e4cbb6 100644 --- a/spanner/nox.py +++ b/spanner/nox.py @@ -30,15 +30,25 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.spanner', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.spanner', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', 'tests/unit', + *session.posargs ) @@ -49,11 +59,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -66,21 +79,32 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/spanner') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/spanner/pylint.config.py b/spanner/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/spanner/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/spanner/setup.py b/spanner/setup.py index ea9cea88bd6d..0808c1309b6a 100644 --- a/spanner/setup.py +++ b/spanner/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.1, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-spanner-v1 >= 0.15.0, < 0.16dev', 'gapic-google-cloud-spanner-admin-database-v1 >= 0.15.0, < 0.16dev', @@ -60,7 +60,7 @@ setup( name='google-cloud-spanner', - version='0.24.2', + version='0.25.0', description='Python Client for Cloud Spanner', long_description=README, namespace_packages=[ diff --git a/spanner/tests/_fixtures.py b/spanner/tests/_fixtures.py index 1123d03c3f2d..ace9b981b6ec 100644 --- a/spanner/tests/_fixtures.py +++ b/spanner/tests/_fixtures.py @@ -38,6 +38,10 @@ description STRING(16), exactly_hwhen TIMESTAMP) PRIMARY KEY (eye_d); +CREATE TABLE counters ( + name STRING(1024), + value INT64 ) + PRIMARY KEY (name); """ DDL_STATEMENTS = [stmt.strip() for stmt in DDL.split(';') if stmt.strip()] diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index b1ce6c892e3e..f5d15d715ed5 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -17,6 +17,8 @@ import operator import os import struct +import threading +import time import unittest from google.cloud.proto.spanner.v1.type_pb2 import ARRAY @@ -33,6 +35,7 @@ from google.cloud.exceptions import GrpcRendezvous from google.cloud.spanner._helpers import TimestampWithNanoseconds from google.cloud.spanner.client import Client +from google.cloud.spanner.keyset import KeyRange from google.cloud.spanner.keyset import KeySet from google.cloud.spanner.pool import BurstyPool @@ -87,6 +90,10 @@ def setUpModule(): configs = list(retry(Config.CLIENT.list_instance_configs)()) + # Defend against back-end returning configs for regions we aren't + # actually allowed to use. 
+ configs = [config for config in configs if '-us-' in config.name] + if len(configs) < 1: raise ValueError('List instance configs failed in module set up.') @@ -353,6 +360,11 @@ class TestSessionAPI(unittest.TestCase, _TestData): 'description', 'exactly_hwhen', ) + COUNTERS_TABLE = 'counters' + COUNTERS_COLUMNS = ( + 'name', + 'value', + ) SOME_DATE = datetime.date(2011, 1, 17) SOME_TIME = datetime.datetime(1989, 1, 17, 17, 59, 12, 345612) NANO_TIME = TimestampWithNanoseconds(1995, 8, 31, nanosecond=987654321) @@ -360,10 +372,11 @@ class TestSessionAPI(unittest.TestCase, _TestData): BYTES_1 = b'Ymlu' BYTES_2 = b'Ym9vdHM=' ALL_TYPES_ROWDATA = ( + ([], False, None, None, 0.0, None, None, None), ([1], True, BYTES_1, SOME_DATE, 0.0, 19, u'dog', SOME_TIME), ([5, 10], True, BYTES_1, None, 1.25, 99, u'cat', None), ([], False, BYTES_2, None, float('inf'), 107, u'frog', None), - ([], False, None, None, float('-inf'), 207, None, None), + ([3, None, 9], False, None, None, float('-inf'), 207, None, None), ([], False, None, None, float('nan'), 1207, None, None), ([], False, None, None, OTHER_NAN, 2000, None, NANO_TIME), ) @@ -477,6 +490,31 @@ def test_transaction_read_and_insert_then_rollback(self): rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL)) self.assertEqual(rows, []) + def _transaction_read_then_raise(self, transaction): + rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL)) + self.assertEqual(len(rows), 0) + transaction.insert(self.TABLE, self.COLUMNS, self.ROW_DATA) + raise CustomException() + + @RetryErrors(exception=GrpcRendezvous) + def test_transaction_read_and_insert_then_exception(self): + retry = RetryInstanceState(_has_all_ddl) + retry(self._db.reload)() + + session = self._db.session() + session.create() + self.to_delete.append(session) + + with session.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + with self.assertRaises(CustomException): + session.run_in_transaction(self._transaction_read_then_raise) + + # Transaction was rolled back. + rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL)) + self.assertEqual(rows, []) + @RetryErrors(exception=GrpcRendezvous) def test_transaction_read_and_insert_or_update_then_commit(self): retry = RetryInstanceState(_has_all_ddl) @@ -503,6 +541,87 @@ def test_transaction_read_and_insert_or_update_then_commit(self): rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL)) self._check_row_data(rows) + def _transaction_concurrency_helper(self, unit_of_work, pkey): + INITIAL_VALUE = 123 + NUM_THREADS = 3 # conforms to equivalent Java systest. + + retry = RetryInstanceState(_has_all_ddl) + retry(self._db.reload)() + + session = self._db.session() + session.create() + self.to_delete.append(session) + + with session.batch() as batch: + batch.insert_or_update( + self.COUNTERS_TABLE, + self.COUNTERS_COLUMNS, + [[pkey, INITIAL_VALUE]]) + + # We don't want to run the threads' transactions in the current + # session, which would fail. 
+ txn_sessions = [] + + for _ in range(NUM_THREADS): + txn_session = self._db.session() + txn_sessions.append(txn_session) + txn_session.create() + self.to_delete.append(txn_session) + + threads = [ + threading.Thread( + target=txn_session.run_in_transaction, + args=(unit_of_work, pkey)) + for txn_session in txn_sessions] + + for thread in threads: + thread.start() + + for thread in threads: + thread.join() + + keyset = KeySet(keys=[(pkey,)]) + rows = list(session.read( + self.COUNTERS_TABLE, self.COUNTERS_COLUMNS, keyset)) + self.assertEqual(len(rows), 1) + _, value = rows[0] + self.assertEqual(value, INITIAL_VALUE + len(threads)) + + def _read_w_concurrent_update(self, transaction, pkey): + keyset = KeySet(keys=[(pkey,)]) + rows = list(transaction.read( + self.COUNTERS_TABLE, self.COUNTERS_COLUMNS, keyset)) + self.assertEqual(len(rows), 1) + pkey, value = rows[0] + transaction.update( + self.COUNTERS_TABLE, + self.COUNTERS_COLUMNS, + [[pkey, value + 1]]) + + def test_transaction_read_w_concurrent_updates(self): + PKEY = 'read_w_concurrent_updates' + self._transaction_concurrency_helper( + self._read_w_concurrent_update, PKEY) + + def _query_w_concurrent_update(self, transaction, pkey): + SQL = 'SELECT * FROM counters WHERE name = @name' + rows = list(transaction.execute_sql( + SQL, + params={'name': pkey}, + param_types={'name': Type(code=STRING)}, + )) + self.assertEqual(len(rows), 1) + pkey, value = rows[0] + transaction.update( + self.COUNTERS_TABLE, + self.COUNTERS_COLUMNS, + [[pkey, value + 1]]) + + def test_transaction_query_w_concurrent_updates(self): + PKEY = 'query_w_concurrent_updates' + self._transaction_concurrency_helper( + self._query_w_concurrent_update, PKEY) + @staticmethod def _row_data(max_index): for index in range(max_index): @@ -533,6 +652,92 @@ def _unit_of_work(transaction, test): return session, committed + def test_snapshot_read_w_various_staleness(self): + from datetime import datetime + from google.cloud._helpers import UTC + ROW_COUNT = 400 + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + + before_reads = datetime.utcnow().replace(tzinfo=UTC) + + # Test w/ read timestamp + read_tx = session.snapshot(read_timestamp=committed) + rows = list(read_tx.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(rows, all_data_rows) + + # Test w/ min read timestamp + min_read_ts = session.snapshot(min_read_timestamp=committed) + rows = list(min_read_ts.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(rows, all_data_rows) + + staleness = datetime.utcnow().replace(tzinfo=UTC) - before_reads + + # Test w/ max staleness + max_staleness = session.snapshot(max_staleness=staleness) + rows = list(max_staleness.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(rows, all_data_rows) + + # Test w/ exact staleness + exact_staleness = session.snapshot(exact_staleness=staleness) + rows = list(exact_staleness.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(rows, all_data_rows) + + # Test w/ strong + strong = session.snapshot() + rows = list(strong.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(rows, all_data_rows) + + def test_multiuse_snapshot_read_isolation_strong(self): + ROW_COUNT = 40 + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + strong = session.snapshot(multi_use=True) + + before = list(strong.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(before, all_data_rows) + + with 
self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(strong.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(after, all_data_rows) + + def test_multiuse_snapshot_read_isolation_read_timestamp(self): + ROW_COUNT = 40 + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + read_ts = session.snapshot(read_timestamp=committed, multi_use=True) + + before = list(read_ts.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(read_ts.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(after, all_data_rows) + + def test_multiuse_snapshot_read_isolation_exact_staleness(self): + ROW_COUNT = 40 + + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + + time.sleep(1) + delta = datetime.timedelta(microseconds=1000) + + exact = session.snapshot(exact_staleness=delta, multi_use=True) + + before = list(exact.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(exact.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(after, all_data_rows) + def test_read_w_manual_consume(self): ROW_COUNT = 4000 session, committed = self._set_up_table(ROW_COUNT) @@ -580,6 +785,32 @@ def test_read_w_index(self): [(row[0], row[2]) for row in self._row_data(ROW_COUNT)])) self._check_row_data(rows, expected) + def test_read_w_single_key(self): + ROW_COUNT = 40 + session, committed = self._set_up_table(ROW_COUNT) + + snapshot = session.snapshot(read_timestamp=committed) + rows = list(snapshot.read( + self.TABLE, self.COLUMNS, KeySet(keys=[(0,)]))) + + all_data_rows = list(self._row_data(ROW_COUNT)) + expected = [all_data_rows[0]] + self._check_row_data(rows, expected) + + def test_read_w_multiple_keys(self): + ROW_COUNT = 40 + indices = [0, 5, 17] + session, committed = self._set_up_table(ROW_COUNT) + + snapshot = session.snapshot(read_timestamp=committed) + rows = list(snapshot.read( + self.TABLE, self.COLUMNS, + KeySet(keys=[(index,) for index in indices]))) + + all_data_rows = list(self._row_data(ROW_COUNT)) + expected = [row for row in all_data_rows if row[0] in indices] + self._check_row_data(rows, expected) + def test_read_w_limit(self): ROW_COUNT = 4000 LIMIT = 100 @@ -593,21 +824,40 @@ def test_read_w_limit(self): expected = all_data_rows[:LIMIT] self._check_row_data(rows, expected) - def test_read_w_range(self): - from google.cloud.spanner.keyset import KeyRange + def test_read_w_ranges(self): ROW_COUNT = 4000 - START_CLOSED = 1000 - END_OPEN = 2000 + START = 1000 + END = 2000 session, committed = self._set_up_table(ROW_COUNT) - key_range = KeyRange(start_closed=[START_CLOSED], end_open=[END_OPEN]) - keyset = KeySet(ranges=(key_range,)) + snapshot = session.snapshot(read_timestamp=committed, multi_use=True) + all_data_rows = list(self._row_data(ROW_COUNT)) - snapshot = session.snapshot(read_timestamp=committed) + closed_closed = KeyRange(start_closed=[START], end_closed=[END]) + keyset = KeySet(ranges=(closed_closed,)) rows = list(snapshot.read( self.TABLE, self.COLUMNS, keyset)) + expected = all_data_rows[START:END+1] + self._check_row_data(rows, expected) - all_data_rows = list(self._row_data(ROW_COUNT)) - expected = all_data_rows[START_CLOSED:END_OPEN] + closed_open = KeyRange(start_closed=[START], 
end_open=[END]) + keyset = KeySet(ranges=(closed_open,)) + rows = list(snapshot.read( + self.TABLE, self.COLUMNS, keyset)) + expected = all_data_rows[START:END] + self._check_row_data(rows, expected) + + open_open = KeyRange(start_open=[START], end_open=[END]) + keyset = KeySet(ranges=(open_open,)) + rows = list(snapshot.read( + self.TABLE, self.COLUMNS, keyset)) + expected = all_data_rows[START+1:END] + self._check_row_data(rows, expected) + + open_closed = KeyRange(start_open=[START], end_closed=[END]) + keyset = KeySet(ranges=(open_closed,)) + rows = list(snapshot.read( + self.TABLE, self.COLUMNS, keyset)) + expected = all_data_rows[START+1:END+1] self._check_row_data(rows, expected) def test_execute_sql_w_manual_consume(self): @@ -637,6 +887,42 @@ def _check_sql_results(self, snapshot, sql, params, param_types, expected): sql, params=params, param_types=param_types)) self._check_row_data(rows, expected=expected) + def test_multiuse_snapshot_execute_sql_isolation_strong(self): + ROW_COUNT = 40 + SQL = 'SELECT * FROM {}'.format(self.TABLE) + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + strong = session.snapshot(multi_use=True) + + before = list(strong.execute_sql(SQL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(strong.execute_sql(SQL)) + self._check_row_data(after, all_data_rows) + + def test_execute_sql_returning_array_of_struct(self): + SQL = ( + "SELECT ARRAY(SELECT AS STRUCT C1, C2 " + "FROM (SELECT 'a' AS C1, 1 AS C2 " + "UNION ALL SELECT 'b' AS C1, 2 AS C2) " + "ORDER BY C1 ASC)" + ) + session = self._db.session() + session.create() + self.to_delete.append(session) + snapshot = session.snapshot() + self._check_sql_results( + snapshot, + sql=SQL, + params=None, + param_types=None, + expected=[ + [[['a', 1], ['b', 2]]], + ]) + def test_execute_sql_w_query_param(self): session = self._db.session() session.create() @@ -649,7 +935,8 @@ def test_execute_sql_w_query_param(self): self.ALL_TYPES_COLUMNS, self.ALL_TYPES_ROWDATA) - snapshot = session.snapshot(read_timestamp=batch.committed) + snapshot = session.snapshot( + read_timestamp=batch.committed, multi_use=True) # Cannot equality-test array values. See below for a test w/ # array of IDs. 
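The multi-use snapshot tests above exercise the headline change in this diff: with `multi_use=True`, the first read or query begins a read-only transaction whose id is reused afterwards, so every subsequent read observes the same commit timestamp instead of getting a fresh single-use transaction. A hedged usage sketch; `database` is assumed bound, and `ALL` mirrors the tests' `self.ALL` catch-all keyset:

from google.cloud.spanner.keyset import KeySet

ALL = KeySet(all_=True)

def demo_multi_use(database):
    session = database.session()
    session.create()

    # Strong multi-use snapshot: the first read begins a read-only
    # transaction and pins its timestamp for every later read.
    snapshot = session.snapshot(multi_use=True)
    before = list(snapshot.read('counters', ('name', 'value'), ALL))

    # A mutation committed elsewhere after that first read ...
    with database.batch() as batch:
        batch.delete('counters', ALL)

    # ... is invisible to later reads and queries on the same snapshot.
    after = list(snapshot.execute_sql('SELECT * FROM counters'))
    assert after == before

Per the constructor tests later in this diff, `multi_use=True` combines with strong, `read_timestamp`, or `exact_staleness` reads, but raises `ValueError` with `min_read_timestamp` or `max_staleness`, since those bounds cannot pin a single reusable timestamp.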
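test_read_w_ranges, just above, also fixes the semantics of the four KeyRange bound combinations. Against integer keys 0..N-1 each combination corresponds to a plain Python slice, which is exactly how the test computes its expectations; a compact restatement, with `snapshot`, `table`, `columns`, and `all_rows` standing in for the test's fixtures:

from google.cloud.spanner.keyset import KeyRange, KeySet

START, END = 1000, 2000

CASES = [
    (KeyRange(start_closed=[START], end_closed=[END]), slice(START, END + 1)),
    (KeyRange(start_closed=[START], end_open=[END]), slice(START, END)),
    (KeyRange(start_open=[START], end_open=[END]), slice(START + 1, END)),
    (KeyRange(start_open=[START], end_closed=[END]), slice(START + 1, END + 1)),
]

def check_range_reads(snapshot, table, columns, all_rows):
    # `all_rows` is ordered by the integer primary key, so every
    # open/closed bound combination maps onto a slice of it.
    for key_range, expected in CASES:
        rows = list(snapshot.read(
            table, columns, KeySet(ranges=(key_range,))))
        assert rows == all_rows[expected]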
@@ -685,7 +972,7 @@ def test_execute_sql_w_query_param(self): params={'lower': 0.0, 'upper': 1.0}, param_types={ 'lower': Type(code=FLOAT64), 'upper': Type(code=FLOAT64)}, - expected=[(19,)], + expected=[(None,), (19,)], ) # Find -inf @@ -714,6 +1001,14 @@ def test_execute_sql_w_query_param(self): expected=[(u'dog',)], ) + self._check_sql_results( + snapshot, + sql='SELECT description FROM all_types WHERE eye_d = @my_id', + params={'my_id': None}, + param_types={'my_id': Type(code=INT64)}, + expected=[], + ) + self._check_sql_results( snapshot, sql='SELECT eye_d FROM all_types WHERE description = @description', @@ -796,6 +1091,10 @@ def test_four_meg(self): self._verify_two_columns(FOUR_MEG) +class CustomException(Exception): + """Placeholder for any user-defined exception.""" + + class _DatabaseDropper(object): """Helper for cleaning up databases created on-the-fly.""" diff --git a/spanner/tests/unit/test__helpers.py b/spanner/tests/unit/test__helpers.py index 172c3343cba0..beb5ed7b6bac 100644 --- a/spanner/tests/unit/test__helpers.py +++ b/spanner/tests/unit/test__helpers.py @@ -512,7 +512,7 @@ def _make_one(self, session): def test_ctor(self): session = object() base = self._make_one(session) - self.assertTrue(base._session is session) + self.assertIs(base._session, session) class Test_options_with_prefix(unittest.TestCase): diff --git a/spanner/tests/unit/test_batch.py b/spanner/tests/unit/test_batch.py index ad4cbc872a1e..cf65fdd7e4f5 100644 --- a/spanner/tests/unit/test_batch.py +++ b/spanner/tests/unit/test_batch.py @@ -65,7 +65,7 @@ def _compare_values(self, result, source): def test_ctor(self): session = _Session() base = self._make_one(session) - self.assertTrue(base._session is session) + self.assertIs(base._session, session) self.assertEqual(len(base._mutations), 0) def test__check_state_virtual(self): @@ -177,7 +177,7 @@ def _getTargetClass(self): def test_ctor(self): session = _Session() batch = self._make_one(session) - self.assertTrue(batch._session is session) + self.assertIs(batch._session, session) def test_commit_already_committed(self): from google.cloud.spanner.keyset import KeySet diff --git a/spanner/tests/unit/test_client.py b/spanner/tests/unit/test_client.py index 98e916d8927d..28eee9b78f56 100644 --- a/spanner/tests/unit/test_client.py +++ b/spanner/tests/unit/test_client.py @@ -15,6 +15,7 @@ import unittest import mock +import six def _make_credentials(): @@ -40,13 +41,13 @@ class TestClient(unittest.TestCase): TIMEOUT_SECONDS = 80 USER_AGENT = 'you-sir-age-int' - def _getTargetClass(self): + def _get_target_class(self): from google.cloud.spanner.client import Client return Client def _make_one(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def _constructor_test_helper(self, expected_scopes, creds, user_agent=None, @@ -60,7 +61,7 @@ def _constructor_test_helper(self, expected_scopes, creds, expected_creds = expected_creds or creds.with_scopes.return_value self.assertIs(client._credentials, expected_creds) - self.assertTrue(client._credentials is expected_creds) + self.assertIs(client._credentials, expected_creds) if expected_scopes is not None: creds.with_scopes.assert_called_once_with(expected_scopes) @@ -70,9 +71,9 @@ def _constructor_test_helper(self, expected_scopes, creds, def test_constructor_default_scopes(self): from google.cloud.spanner import client as MUT - expected_scopes = [ + expected_scopes = ( MUT.SPANNER_ADMIN_SCOPE, - ] + ) creds = _make_credentials() 
self._constructor_test_helper(expected_scopes, creds) @@ -80,27 +81,25 @@ def test_constructor_custom_user_agent_and_timeout(self): from google.cloud.spanner import client as MUT CUSTOM_USER_AGENT = 'custom-application' - expected_scopes = [ + expected_scopes = ( MUT.SPANNER_ADMIN_SCOPE, - ] + ) creds = _make_credentials() self._constructor_test_helper(expected_scopes, creds, user_agent=CUSTOM_USER_AGENT) def test_constructor_implicit_credentials(self): - from google.cloud._testing import _Monkey - from google.cloud.spanner import client as MUT - creds = _make_credentials() - def mock_get_credentials(): - return creds - - with _Monkey(MUT, get_credentials=mock_get_credentials): + patch = mock.patch( + 'google.auth.default', return_value=(creds, None)) + with patch as default: self._constructor_test_helper( None, None, expected_creds=creds.with_scopes.return_value) + default.assert_called_once_with() + def test_constructor_credentials_wo_create_scoped(self): creds = _make_credentials() expected_scopes = None @@ -162,7 +161,7 @@ def __init__(self, *args, **kwargs): self.assertTrue(isinstance(api, _Client)) again = client.instance_admin_api - self.assertTrue(again is api) + self.assertIs(again, api) self.assertEqual(api.kwargs['lib_name'], 'gccl') self.assertIs(api.kwargs['credentials'], client.credentials) @@ -183,29 +182,32 @@ def __init__(self, *args, **kwargs): self.assertTrue(isinstance(api, _Client)) again = client.database_admin_api - self.assertTrue(again is api) + self.assertIs(again, api) self.assertEqual(api.kwargs['lib_name'], 'gccl') self.assertIs(api.kwargs['credentials'], client.credentials) def test_copy(self): - credentials = _Credentials('value') + credentials = _make_credentials() + # Make sure it "already" is scoped. + credentials.requires_scopes = False + client = self._make_one( project=self.PROJECT, credentials=credentials, user_agent=self.USER_AGENT) new_client = client.copy() - self.assertEqual(new_client._credentials, client._credentials) + self.assertIs(new_client._credentials, client._credentials) self.assertEqual(new_client.project, client.project) self.assertEqual(new_client.user_agent, client.user_agent) def test_credentials_property(self): - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) - self.assertTrue(client.credentials is credentials) + self.assertIs(client.credentials, credentials.with_scopes.return_value) def test_project_name_property(self): - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) project_name = 'projects/' + self.PROJECT self.assertEqual(client.project_name, project_name) @@ -215,7 +217,7 @@ def test_list_instance_configs_wo_paging(self): from google.gax import INITIAL_PAGE from google.cloud.spanner.client import InstanceConfig - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -236,20 +238,19 @@ def test_list_instance_configs_wo_paging(self): project, page_size, options = api._listed_instance_configs self.assertEqual(project, self.PATH) self.assertEqual(page_size, None) - self.assertTrue(options.page_token is INITIAL_PAGE) + self.assertIs(options.page_token, INITIAL_PAGE) self.assertEqual( options.kwargs['metadata'], [('google-cloud-resource-prefix', client.project_name)]) def 
test_list_instance_configs_w_paging(self): - import six from google.cloud._testing import _GAXPageIterator from google.cloud.spanner.client import InstanceConfig SIZE = 15 TOKEN_RETURNED = 'TOKEN_RETURNED' TOKEN_PASSED = 'TOKEN_PASSED' - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -282,7 +283,7 @@ def test_instance_factory_defaults(self): from google.cloud.spanner.instance import DEFAULT_NODE_COUNT from google.cloud.spanner.instance import Instance - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) instance = client.instance(self.INSTANCE_ID) @@ -292,12 +293,12 @@ def test_instance_factory_defaults(self): self.assertIsNone(instance.configuration_name) self.assertEqual(instance.display_name, self.INSTANCE_ID) self.assertEqual(instance.node_count, DEFAULT_NODE_COUNT) - self.assertTrue(instance._client is client) + self.assertIs(instance._client, client) def test_instance_factory_explicit(self): from google.cloud.spanner.instance import Instance - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) instance = client.instance(self.INSTANCE_ID, self.CONFIGURATION_NAME, @@ -309,14 +310,14 @@ def test_instance_factory_explicit(self): self.assertEqual(instance.configuration_name, self.CONFIGURATION_NAME) self.assertEqual(instance.display_name, self.DISPLAY_NAME) self.assertEqual(instance.node_count, self.NODE_COUNT) - self.assertTrue(instance._client is client) + self.assertIs(instance._client, client) def test_list_instances_wo_paging(self): from google.cloud._testing import _GAXPageIterator from google.gax import INITIAL_PAGE from google.cloud.spanner.instance import Instance - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -342,20 +343,19 @@ def test_list_instances_wo_paging(self): self.assertEqual(project, self.PATH) self.assertEqual(filter_, 'name:TEST') self.assertEqual(page_size, None) - self.assertTrue(options.page_token is INITIAL_PAGE) + self.assertIs(options.page_token, INITIAL_PAGE) self.assertEqual( options.kwargs['metadata'], [('google-cloud-resource-prefix', client.project_name)]) def test_list_instances_w_paging(self): - import six from google.cloud._testing import _GAXPageIterator from google.cloud.spanner.instance import Instance SIZE = 15 TOKEN_RETURNED = 'TOKEN_RETURNED' TOKEN_PASSED = 'TOKEN_PASSED' - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -391,22 +391,6 @@ def test_list_instances_w_paging(self): [('google-cloud-resource-prefix', client.project_name)]) -class _Credentials(object): - - scopes = None - - def __init__(self, access_token=None): - self._access_token = access_token - self._tokens = [] - - def create_scoped(self, scope): - self.scopes = scope - return self - - def __eq__(self, other): - return self._access_token == other._access_token - - class _FauxInstanceAdminAPI(object): def list_instance_configs(self, name, page_size, options): diff --git 
a/spanner/tests/unit/test_database.py b/spanner/tests/unit/test_database.py index 5369a6f2c0d1..aa1643ed7582 100644 --- a/spanner/tests/unit/test_database.py +++ b/spanner/tests/unit/test_database.py @@ -50,7 +50,7 @@ def test_ctor_defaults(self): database = self._make_one(self.DATABASE_ID, instance) self.assertEqual(database.database_id, self.DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), []) self.assertIsInstance(database._pool, BurstyPool) # BurstyPool does not create sessions during 'bind()'. @@ -61,7 +61,7 @@ def test_ctor_w_explicit_pool(self): pool = _Pool() database = self._make_one(self.DATABASE_ID, instance, pool=pool) self.assertEqual(database.database_id, self.DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), []) self.assertIs(database._pool, pool) self.assertIs(pool._bound, database) @@ -89,7 +89,7 @@ def test_ctor_w_ddl_statements_ok(self): self.DATABASE_ID, instance, ddl_statements=DDL_STATEMENTS, pool=pool) self.assertEqual(database.database_id, self.DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), DDL_STATEMENTS) def test_from_pb_bad_database_name(self): @@ -196,10 +196,10 @@ def _mock_spanner_client(*args, **kwargs): with _Monkey(MUT, SpannerClient=_mock_spanner_client): api = database.spanner_api - self.assertTrue(api is _client) + self.assertIs(api, _client) # API instance is cached again = database.spanner_api - self.assertTrue(again is api) + self.assertIs(again, api) def test___eq__(self): instance = _Instance(self.INSTANCE_NAME) @@ -312,8 +312,6 @@ def test_create_success(self): future = database.create() self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'CreateDatabase'}) (parent, create_statement, extra_statements, options) = api._created_database @@ -493,8 +491,6 @@ def test_update_ddl(self): future = database.update_ddl(DDL_STATEMENTS) self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'UpdateDatabaseDdl'}) name, statements, op_id, options = api._updated_database_ddl self.assertEqual(name, self.DATABASE_NAME) @@ -567,8 +563,8 @@ def test_session_factory(self): session = database.session() self.assertTrue(isinstance(session, Session)) - self.assertTrue(session.session_id is None) - self.assertTrue(session._database is database) + self.assertIs(session.session_id, None) + self.assertIs(session._database, database) def test_execute_sql_defaults(self): QUERY = 'SELECT * FROM employees' @@ -671,7 +667,7 @@ def test_batch(self): checkout = database.batch() self.assertIsInstance(checkout, BatchCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) def test_snapshot_defaults(self): from google.cloud.spanner.database import SnapshotCheckout @@ -685,35 +681,10 @@ def test_snapshot_defaults(self): checkout = database.snapshot() self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - def test_snapshot_w_read_timestamp(self): - import datetime - from google.cloud._helpers import UTC - 
from google.cloud.spanner.database import SnapshotCheckout - - now = datetime.datetime.utcnow().replace(tzinfo=UTC) - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - pool = _Pool() - session = _Session() - pool.put(session) - database = self._make_one(self.DATABASE_ID, instance, pool=pool) - - checkout = database.snapshot(read_timestamp=now) - - self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) - self.assertEqual(checkout._read_timestamp, now) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) + self.assertIs(checkout._database, database) + self.assertEqual(checkout._kw, {}) - def test_snapshot_w_min_read_timestamp(self): + def test_snapshot_w_read_timestamp_and_multi_use(self): import datetime from google.cloud._helpers import UTC from google.cloud.spanner.database import SnapshotCheckout @@ -726,56 +697,12 @@ def test_snapshot_w_min_read_timestamp(self): pool.put(session) database = self._make_one(self.DATABASE_ID, instance, pool=pool) - checkout = database.snapshot(min_read_timestamp=now) - - self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertEqual(checkout._min_read_timestamp, now) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - def test_snapshot_w_max_staleness(self): - import datetime - from google.cloud.spanner.database import SnapshotCheckout - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - pool = _Pool() - session = _Session() - pool.put(session) - database = self._make_one(self.DATABASE_ID, instance, pool=pool) - - checkout = database.snapshot(max_staleness=staleness) + checkout = database.snapshot(read_timestamp=now, multi_use=True) self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertEqual(checkout._max_staleness, staleness) - self.assertIsNone(checkout._exact_staleness) - - def test_snapshot_w_exact_staleness(self): - import datetime - from google.cloud.spanner.database import SnapshotCheckout - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - pool = _Pool() - session = _Session() - pool.put(session) - database = self._make_one(self.DATABASE_ID, instance, pool=pool) - - checkout = database.snapshot(exact_staleness=staleness) - - self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertEqual(checkout._exact_staleness, staleness) + self.assertIs(checkout._database, database) + self.assertEqual( + checkout._kw, {'read_timestamp': now, 'multi_use': True}) class TestBatchCheckout(_BaseTest): @@ -788,7 +715,7 @@ def _getTargetClass(self): def test_ctor(self): database = _Database(self.DATABASE_NAME) checkout = self._make_one(database) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) def test_context_mgr_success(self): import datetime @@ -865,21 +792,19 @@ def 
test_ctor_defaults(self): pool.put(session) checkout = self._make_one(database) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) + self.assertIs(checkout._database, database) + self.assertEqual(checkout._kw, {}) with checkout as snapshot: self.assertIsNone(pool._session) self.assertIsInstance(snapshot, Snapshot) self.assertIs(snapshot._session, session) self.assertTrue(snapshot._strong) + self.assertFalse(snapshot._multi_use) self.assertIs(pool._session, session) - def test_ctor_w_read_timestamp(self): + def test_ctor_w_read_timestamp_and_multi_use(self): import datetime from google.cloud._helpers import UTC from google.cloud.spanner.snapshot import Snapshot @@ -890,99 +815,17 @@ def test_ctor_w_read_timestamp(self): pool = database._pool = _Pool() pool.put(session) - checkout = self._make_one(database, read_timestamp=now) - self.assertTrue(checkout._database is database) - self.assertEqual(checkout._read_timestamp, now) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - with checkout as snapshot: - self.assertIsNone(pool._session) - self.assertIsInstance(snapshot, Snapshot) - self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._read_timestamp, now) - - self.assertIs(pool._session, session) - - def test_ctor_w_min_read_timestamp(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud.spanner.snapshot import Snapshot - - now = datetime.datetime.utcnow().replace(tzinfo=UTC) - database = _Database(self.DATABASE_NAME) - session = _Session(database) - pool = database._pool = _Pool() - pool.put(session) - - checkout = self._make_one(database, min_read_timestamp=now) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertEqual(checkout._min_read_timestamp, now) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - with checkout as snapshot: - self.assertIsNone(pool._session) - self.assertIsInstance(snapshot, Snapshot) - self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._min_read_timestamp, now) - - self.assertIs(pool._session, session) - - def test_ctor_w_max_staleness(self): - import datetime - from google.cloud.spanner.snapshot import Snapshot - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - database = _Database(self.DATABASE_NAME) - session = _Session(database) - pool = database._pool = _Pool() - pool.put(session) - - checkout = self._make_one(database, max_staleness=staleness) - self.assertTrue(checkout._database is database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertEqual(checkout._max_staleness, staleness) - self.assertIsNone(checkout._exact_staleness) - - with checkout as snapshot: - self.assertIsNone(pool._session) - self.assertIsInstance(snapshot, Snapshot) - self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._max_staleness, staleness) - - self.assertIs(pool._session, session) - - def test_ctor_w_exact_staleness(self): - import datetime - from google.cloud.spanner.snapshot import Snapshot - - staleness = datetime.timedelta(seconds=1, 
microseconds=234567) - database = _Database(self.DATABASE_NAME) - session = _Session(database) - pool = database._pool = _Pool() - pool.put(session) - - checkout = self._make_one(database, exact_staleness=staleness) - + checkout = self._make_one(database, read_timestamp=now, multi_use=True) self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertEqual(checkout._exact_staleness, staleness) + self.assertEqual(checkout._kw, + {'read_timestamp': now, 'multi_use': True}) with checkout as snapshot: self.assertIsNone(pool._session) self.assertIsInstance(snapshot, Snapshot) self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._exact_staleness, staleness) + self.assertEqual(snapshot._read_timestamp, now) + self.assertTrue(snapshot._multi_use) self.assertIs(pool._session, session) diff --git a/spanner/tests/unit/test_instance.py b/spanner/tests/unit/test_instance.py index b556a0396f01..ca8edacf3b81 100644 --- a/spanner/tests/unit/test_instance.py +++ b/spanner/tests/unit/test_instance.py @@ -50,8 +50,8 @@ def test_constructor_defaults(self): client = object() instance = self._make_one(self.INSTANCE_ID, client) self.assertEqual(instance.instance_id, self.INSTANCE_ID) - self.assertTrue(instance._client is client) - self.assertTrue(instance.configuration_name is None) + self.assertIs(instance._client, client) + self.assertIs(instance.configuration_name, None) self.assertEqual(instance.node_count, DEFAULT_NODE_COUNT) self.assertEqual(instance.display_name, self.INSTANCE_ID) @@ -64,7 +64,7 @@ def test_constructor_non_default(self): node_count=self.NODE_COUNT, display_name=DISPLAY_NAME) self.assertEqual(instance.instance_id, self.INSTANCE_ID) - self.assertTrue(instance._client is client) + self.assertIs(instance._client, client) self.assertEqual(instance.configuration_name, self.CONFIG_NAME) self.assertEqual(instance.node_count, self.NODE_COUNT) self.assertEqual(instance.display_name, DISPLAY_NAME) @@ -78,10 +78,10 @@ def test_copy(self): new_instance = instance.copy() # Make sure the client copy succeeded. - self.assertFalse(new_instance._client is client) + self.assertIsNot(new_instance._client, client) self.assertEqual(new_instance._client, client) # Make sure the client got copied to a new instance. 
- self.assertFalse(instance is new_instance) + self.assertIsNot(instance, new_instance) self.assertEqual(instance, new_instance) def test__update_from_pb_success(self): @@ -241,8 +241,6 @@ def test_create_success(self): future = instance.create() self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'CreateInstance'}) (parent, instance_id, instance, options) = api._created_instance self.assertEqual(parent, self.PARENT) @@ -424,8 +422,6 @@ def test_update_success(self): future = instance.update() self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'UpdateInstance'}) instance, field_mask, options = api._updated_instance self.assertEqual(field_mask.paths, @@ -496,7 +492,7 @@ def test_database_factory_defaults(self): self.assertTrue(isinstance(database, Database)) self.assertEqual(database.database_id, DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), []) self.assertIsInstance(database._pool, BurstyPool) pool = database._pool @@ -516,7 +512,7 @@ def test_database_factory_explicit(self): self.assertTrue(isinstance(database, Database)) self.assertEqual(database.database_id, DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), DDL_STATEMENTS) self.assertIs(database._pool, pool) self.assertIs(pool._bound, database) @@ -547,7 +543,7 @@ def test_list_databases_wo_paging(self): instance_name, page_size, options = api._listed_databases self.assertEqual(instance_name, self.INSTANCE_NAME) self.assertEqual(page_size, None) - self.assertTrue(options.page_token is INITIAL_PAGE) + self.assertIs(options.page_token, INITIAL_PAGE) self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', instance.name)]) diff --git a/spanner/tests/unit/test_session.py b/spanner/tests/unit/test_session.py index 5f75d471a7cf..100555c8e49f 100644 --- a/spanner/tests/unit/test_session.py +++ b/spanner/tests/unit/test_session.py @@ -39,8 +39,8 @@ def _make_one(self, *args, **kwargs): def test_constructor(self): database = _Database(self.DATABASE_NAME) session = self._make_one(database) - self.assertTrue(session.session_id is None) - self.assertTrue(session._database is database) + self.assertIs(session.session_id, None) + self.assertIs(session._database, database) def test___lt___(self): database = _Database(self.DATABASE_NAME) @@ -222,9 +222,24 @@ def test_snapshot_created(self): snapshot = session.snapshot() + self.assertIsInstance(snapshot, Snapshot) + self.assertIs(snapshot._session, session) + self.assertTrue(snapshot._strong) + self.assertFalse(snapshot._multi_use) + + def test_snapshot_created_w_multi_use(self): + from google.cloud.spanner.snapshot import Snapshot + + database = _Database(self.DATABASE_NAME) + session = self._make_one(database) + session._session_id = 'DEADBEEF' # emulate 'session.create()' + + snapshot = session.snapshot(multi_use=True) + self.assertIsInstance(snapshot, Snapshot) self.assertTrue(snapshot._session is session) self.assertTrue(snapshot._strong) + self.assertTrue(snapshot._multi_use) def test_read_not_created(self): from google.cloud.spanner.keyset import KeySet @@ -352,7 +367,7 @@ def test_batch_created(self): batch = session.batch() self.assertIsInstance(batch, Batch) - self.assertTrue(batch._session is session) + self.assertIs(batch._session, session) def 
test_transaction_not_created(self): database = _Database(self.DATABASE_NAME) @@ -371,8 +386,8 @@ def test_transaction_created(self): transaction = session.transaction() self.assertIsInstance(transaction, Transaction) - self.assertTrue(transaction._session is session) - self.assertTrue(session._transaction is transaction) + self.assertIs(transaction._session, session) + self.assertIs(session._transaction, transaction) def test_transaction_w_existing_txn(self): database = _Database(self.DATABASE_NAME) @@ -382,7 +397,7 @@ def test_transaction_w_existing_txn(self): existing = session.transaction() another = session.transaction() # invalidates existing txn - self.assertTrue(session._transaction is another) + self.assertIs(session._transaction, another) self.assertTrue(existing._rolled_back) def test_retry_transaction_w_commit_error_txn_already_begun(self): @@ -403,7 +418,7 @@ def test_retry_transaction_w_commit_error_txn_already_begun(self): session = self._make_one(database) session._session_id = 'DEADBEEF' begun_txn = session._transaction = Transaction(session) - begun_txn._id = b'FACEDACE' + begun_txn._transaction_id = b'FACEDACE' called_with = [] diff --git a/spanner/tests/unit/test_snapshot.py b/spanner/tests/unit/test_snapshot.py index cf1abce94f45..4717a14c2f24 100644 --- a/spanner/tests/unit/test_snapshot.py +++ b/spanner/tests/unit/test_snapshot.py @@ -53,12 +53,19 @@ def _makeDerived(self, session): class _Derived(self._getTargetClass()): + _transaction_id = None + _multi_use = False + def _make_txn_selector(self): from google.cloud.proto.spanner.v1.transaction_pb2 import ( TransactionOptions, TransactionSelector) + if self._transaction_id: + return TransactionSelector(id=self._transaction_id) options = TransactionOptions( read_only=TransactionOptions.ReadOnly(strong=True)) + if self._multi_use: + return TransactionSelector(begin=options) return TransactionSelector(single_use=options) return _Derived(session) @@ -66,7 +73,7 @@ def _make_txn_selector(self): def test_ctor(self): session = _Session() base = self._make_one(session) - self.assertTrue(base._session is session) + self.assertIs(base._session, session) def test__make_txn_selector_virtual(self): session = _Session() @@ -105,7 +112,7 @@ def test_read_grpc_error(self): self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', database.name)]) - def test_read_normal(self): + def _read_helper(self, multi_use, first=True, count=0): from google.protobuf.struct_pb2 import Struct from google.cloud.proto.spanner.v1.result_set_pb2 import ( PartialResultSet, ResultSetMetadata, ResultSetStats) @@ -116,6 +123,7 @@ def test_read_normal(self): from google.cloud.spanner.keyset import KeySet from google.cloud.spanner._helpers import _make_value_pb + TXN_ID = b'DEADBEEF' VALUES = [ [u'bharney', 31], [u'phred', 32], @@ -147,11 +155,22 @@ def test_read_normal(self): _streaming_read_response=_MockCancellableIterator(*result_sets)) session = _Session(database) derived = self._makeDerived(session) + derived._multi_use = multi_use + derived._read_request_count = count + if not first: + derived._transaction_id = TXN_ID result_set = derived.read( TABLE_NAME, COLUMNS, KEYSET, index=INDEX, limit=LIMIT, resume_token=TOKEN) + self.assertEqual(derived._read_request_count, count + 1) + + if multi_use: + self.assertIs(result_set._source, derived) + else: + self.assertIsNone(result_set._source) + result_set.consume_all() self.assertEqual(list(result_set.rows), VALUES) self.assertEqual(result_set.metadata, metadata_pb) @@ -165,13 +184,39 @@ 
def test_read_normal(self): self.assertEqual(columns, COLUMNS) self.assertEqual(key_set, KEYSET.to_pb()) self.assertIsInstance(transaction, TransactionSelector) - self.assertTrue(transaction.single_use.read_only.strong) + if multi_use: + if first: + self.assertTrue(transaction.begin.read_only.strong) + else: + self.assertEqual(transaction.id, TXN_ID) + else: + self.assertTrue(transaction.single_use.read_only.strong) self.assertEqual(index, INDEX) self.assertEqual(limit, LIMIT) self.assertEqual(resume_token, TOKEN) self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', database.name)]) + def test_read_wo_multi_use(self): + self._read_helper(multi_use=False) + + def test_read_wo_multi_use_w_read_request_count_gt_0(self): + with self.assertRaises(ValueError): + self._read_helper(multi_use=False, count=1) + + def test_read_w_multi_use_wo_first(self): + self._read_helper(multi_use=True, first=False) + + def test_read_w_multi_use_wo_first_w_count_gt_0(self): + self._read_helper(multi_use=True, first=False, count=1) + + def test_read_w_multi_use_w_first(self): + self._read_helper(multi_use=True, first=True) + + def test_read_w_multi_use_w_first_w_count_gt_0(self): + with self.assertRaises(ValueError): + self._read_helper(multi_use=True, first=True, count=1) + def test_execute_sql_grpc_error(self): from google.cloud.proto.spanner.v1.transaction_pb2 import ( TransactionSelector) @@ -208,7 +253,7 @@ def test_execute_sql_w_params_wo_param_types(self): with self.assertRaises(ValueError): derived.execute_sql(SQL_QUERY_WITH_PARAM, PARAMS) - def test_execute_sql_normal(self): + def _execute_sql_helper(self, multi_use, first=True, count=0): from google.protobuf.struct_pb2 import Struct from google.cloud.proto.spanner.v1.result_set_pb2 import ( PartialResultSet, ResultSetMetadata, ResultSetStats) @@ -218,6 +263,7 @@ def test_execute_sql_normal(self): from google.cloud.proto.spanner.v1.type_pb2 import STRING, INT64 from google.cloud.spanner._helpers import _make_value_pb + TXN_ID = b'DEADBEEF' VALUES = [ [u'bharney', u'rhubbyl', 31], [u'phred', u'phlyntstone', 32], @@ -248,11 +294,22 @@ def test_execute_sql_normal(self): _execute_streaming_sql_response=iterator) session = _Session(database) derived = self._makeDerived(session) + derived._multi_use = multi_use + derived._read_request_count = count + if not first: + derived._transaction_id = TXN_ID result_set = derived.execute_sql( SQL_QUERY_WITH_PARAM, PARAMS, PARAM_TYPES, query_mode=MODE, resume_token=TOKEN) + self.assertEqual(derived._read_request_count, count + 1) + + if multi_use: + self.assertIs(result_set._source, derived) + else: + self.assertIsNone(result_set._source) + result_set.consume_all() self.assertEqual(list(result_set.rows), VALUES) self.assertEqual(result_set.metadata, metadata_pb) @@ -264,7 +321,13 @@ def test_execute_sql_normal(self): self.assertEqual(r_session, self.SESSION_NAME) self.assertEqual(sql, SQL_QUERY_WITH_PARAM) self.assertIsInstance(transaction, TransactionSelector) - self.assertTrue(transaction.single_use.read_only.strong) + if multi_use: + if first: + self.assertTrue(transaction.begin.read_only.strong) + else: + self.assertEqual(transaction.id, TXN_ID) + else: + self.assertTrue(transaction.single_use.read_only.strong) expected_params = Struct(fields={ key: _make_value_pb(value) for (key, value) in PARAMS.items()}) self.assertEqual(params, expected_params) @@ -274,6 +337,26 @@ def test_execute_sql_normal(self): self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', 
database.name)]) + def test_execute_sql_wo_multi_use(self): + self._execute_sql_helper(multi_use=False) + + def test_execute_sql_wo_multi_use_w_read_request_count_gt_0(self): + with self.assertRaises(ValueError): + self._execute_sql_helper(multi_use=False, count=1) + + def test_execute_sql_w_multi_use_wo_first(self): + self._execute_sql_helper(multi_use=True, first=False) + + def test_execute_sql_w_multi_use_wo_first_w_count_gt_0(self): + self._execute_sql_helper(multi_use=True, first=False, count=1) + + def test_execute_sql_w_multi_use_w_first(self): + self._execute_sql_helper(multi_use=True, first=True) + + def test_execute_sql_w_multi_use_w_first_w_count_gt_0(self): + with self.assertRaises(ValueError): + self._execute_sql_helper(multi_use=True, first=True, count=1) + class _MockCancellableIterator(object): @@ -298,6 +381,7 @@ class TestSnapshot(unittest.TestCase): DATABASE_NAME = INSTANCE_NAME + '/databases/' + DATABASE_ID SESSION_ID = 'session-id' SESSION_NAME = DATABASE_NAME + '/sessions/' + SESSION_ID + TRANSACTION_ID = b'DEADBEEF' def _getTargetClass(self): from google.cloud.spanner.snapshot import Snapshot @@ -320,12 +404,13 @@ def _makeDuration(self, seconds=1, microseconds=0): def test_ctor_defaults(self): session = _Session() snapshot = self._make_one(session) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertTrue(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertIsNone(snapshot._min_read_timestamp) self.assertIsNone(snapshot._max_staleness) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_multiple_options(self): timestamp = self._makeTimestamp() @@ -340,45 +425,108 @@ def test_ctor_w_read_timestamp(self): timestamp = self._makeTimestamp() session = _Session() snapshot = self._make_one(session, read_timestamp=timestamp) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertEqual(snapshot._read_timestamp, timestamp) self.assertIsNone(snapshot._min_read_timestamp) self.assertIsNone(snapshot._max_staleness) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_min_read_timestamp(self): timestamp = self._makeTimestamp() session = _Session() snapshot = self._make_one(session, min_read_timestamp=timestamp) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertEqual(snapshot._min_read_timestamp, timestamp) self.assertIsNone(snapshot._max_staleness) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_max_staleness(self): duration = self._makeDuration() session = _Session() snapshot = self._make_one(session, max_staleness=duration) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertIsNone(snapshot._min_read_timestamp) self.assertEqual(snapshot._max_staleness, duration) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_exact_staleness(self): duration = self._makeDuration() session = _Session() snapshot = self._make_one(session, exact_staleness=duration) + self.assertIs(snapshot._session, session) + self.assertFalse(snapshot._strong) + self.assertIsNone(snapshot._read_timestamp) + 
self.assertIsNone(snapshot._min_read_timestamp) + self.assertIsNone(snapshot._max_staleness) + self.assertEqual(snapshot._exact_staleness, duration) + self.assertFalse(snapshot._multi_use) + + def test_ctor_w_multi_use(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + self.assertTrue(snapshot._session is session) + self.assertTrue(snapshot._strong) + self.assertIsNone(snapshot._read_timestamp) + self.assertIsNone(snapshot._min_read_timestamp) + self.assertIsNone(snapshot._max_staleness) + self.assertIsNone(snapshot._exact_staleness) + self.assertTrue(snapshot._multi_use) + + def test_ctor_w_multi_use_and_read_timestamp(self): + timestamp = self._makeTimestamp() + session = _Session() + snapshot = self._make_one( + session, read_timestamp=timestamp, multi_use=True) + self.assertTrue(snapshot._session is session) + self.assertFalse(snapshot._strong) + self.assertEqual(snapshot._read_timestamp, timestamp) + self.assertIsNone(snapshot._min_read_timestamp) + self.assertIsNone(snapshot._max_staleness) + self.assertIsNone(snapshot._exact_staleness) + self.assertTrue(snapshot._multi_use) + + def test_ctor_w_multi_use_and_min_read_timestamp(self): + timestamp = self._makeTimestamp() + session = _Session() + + with self.assertRaises(ValueError): + self._make_one( + session, min_read_timestamp=timestamp, multi_use=True) + + def test_ctor_w_multi_use_and_max_staleness(self): + duration = self._makeDuration() + session = _Session() + + with self.assertRaises(ValueError): + self._make_one(session, max_staleness=duration, multi_use=True) + + def test_ctor_w_multi_use_and_exact_staleness(self): + duration = self._makeDuration() + session = _Session() + snapshot = self._make_one( + session, exact_staleness=duration, multi_use=True) self.assertTrue(snapshot._session is session) self.assertFalse(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertIsNone(snapshot._min_read_timestamp) self.assertIsNone(snapshot._max_staleness) self.assertEqual(snapshot._exact_staleness, duration) + self.assertTrue(snapshot._multi_use) + + def test__make_txn_selector_w_transaction_id(self): + session = _Session() + snapshot = self._make_one(session) + snapshot._transaction_id = self.TRANSACTION_ID + selector = snapshot._make_txn_selector() + self.assertEqual(selector.id, self.TRANSACTION_ID) def test__make_txn_selector_strong(self): session = _Session() @@ -429,6 +577,127 @@ def test__make_txn_selector_w_exact_staleness(self): self.assertEqual(options.read_only.exact_staleness.seconds, 3) self.assertEqual(options.read_only.exact_staleness.nanos, 123456000) + def test__make_txn_selector_strong_w_multi_use(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + selector = snapshot._make_txn_selector() + options = selector.begin + self.assertTrue(options.read_only.strong) + + def test__make_txn_selector_w_read_timestamp_w_multi_use(self): + from google.cloud._helpers import _pb_timestamp_to_datetime + + timestamp = self._makeTimestamp() + session = _Session() + snapshot = self._make_one( + session, read_timestamp=timestamp, multi_use=True) + selector = snapshot._make_txn_selector() + options = selector.begin + self.assertEqual( + _pb_timestamp_to_datetime(options.read_only.read_timestamp), + timestamp) + + def test__make_txn_selector_w_exact_staleness_w_multi_use(self): + duration = self._makeDuration(seconds=3, microseconds=123456) + session = _Session() + snapshot = self._make_one( + session, exact_staleness=duration, multi_use=True) + selector 
= snapshot._make_txn_selector() + options = selector.begin + self.assertEqual(options.read_only.exact_staleness.seconds, 3) + self.assertEqual(options.read_only.exact_staleness.nanos, 123456000) + + def test_begin_wo_multi_use(self): + session = _Session() + snapshot = self._make_one(session) + with self.assertRaises(ValueError): + snapshot.begin() + + def test_begin_w_read_request_count_gt_0(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + snapshot._read_request_count = 1 + with self.assertRaises(ValueError): + snapshot.begin() + + def test_begin_w_existing_txn_id(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + snapshot._transaction_id = self.TRANSACTION_ID + with self.assertRaises(ValueError): + snapshot.begin() + + def test_begin_w_gax_error(self): + from google.gax.errors import GaxError + from google.cloud._helpers import _pb_timestamp_to_datetime + + database = _Database() + api = database.spanner_api = _FauxSpannerAPI( + _random_gax_error=True) + timestamp = self._makeTimestamp() + session = _Session(database) + snapshot = self._make_one( + session, read_timestamp=timestamp, multi_use=True) + + with self.assertRaises(GaxError): + snapshot.begin() + + session_id, txn_options, options = api._begun + self.assertEqual(session_id, session.name) + self.assertEqual( + _pb_timestamp_to_datetime(txn_options.read_only.read_timestamp), + timestamp) + self.assertEqual(options.kwargs['metadata'], + [('google-cloud-resource-prefix', database.name)]) + + def test_begin_ok_exact_staleness(self): + from google.cloud.proto.spanner.v1.transaction_pb2 import ( + Transaction as TransactionPB) + + transaction_pb = TransactionPB(id=self.TRANSACTION_ID) + database = _Database() + api = database.spanner_api = _FauxSpannerAPI( + _begin_transaction_response=transaction_pb) + duration = self._makeDuration(seconds=3, microseconds=123456) + session = _Session(database) + snapshot = self._make_one( + session, exact_staleness=duration, multi_use=True) + + txn_id = snapshot.begin() + + self.assertEqual(txn_id, self.TRANSACTION_ID) + self.assertEqual(snapshot._transaction_id, self.TRANSACTION_ID) + + session_id, txn_options, options = api._begun + self.assertEqual(session_id, session.name) + read_only = txn_options.read_only + self.assertEqual(read_only.exact_staleness.seconds, 3) + self.assertEqual(read_only.exact_staleness.nanos, 123456000) + self.assertEqual(options.kwargs['metadata'], + [('google-cloud-resource-prefix', database.name)]) + + def test_begin_ok_exact_strong(self): + from google.cloud.proto.spanner.v1.transaction_pb2 import ( + Transaction as TransactionPB) + + transaction_pb = TransactionPB(id=self.TRANSACTION_ID) + database = _Database() + api = database.spanner_api = _FauxSpannerAPI( + _begin_transaction_response=transaction_pb) + session = _Session(database) + snapshot = self._make_one(session, multi_use=True) + + txn_id = snapshot.begin() + + self.assertEqual(txn_id, self.TRANSACTION_ID) + self.assertEqual(snapshot._transaction_id, self.TRANSACTION_ID) + + session_id, txn_options, options = api._begun + self.assertEqual(session_id, session.name) + self.assertTrue(txn_options.read_only.strong) + self.assertEqual(options.kwargs['metadata'], + [('google-cloud-resource-prefix', database.name)]) + class _Session(object): @@ -443,7 +712,15 @@ class _Database(object): class _FauxSpannerAPI(_GAXBaseAPI): - _read_with = None + _read_with = _begin = None + + def begin_transaction(self, session, options_, options=None): + from 
google.gax.errors import GaxError + + self._begun = (session, options_, options) + if self._random_gax_error: + raise GaxError('error') + return self._begin_transaction_response # pylint: disable=too-many-arguments def streaming_read(self, session, table, columns, key_set, diff --git a/spanner/tests/unit/test_streamed.py b/spanner/tests/unit/test_streamed.py index 3300e4048cc7..2e31f4dfad2c 100644 --- a/spanner/tests/unit/test_streamed.py +++ b/spanner/tests/unit/test_streamed.py @@ -15,6 +15,8 @@ import unittest +import mock + class TestStreamedResultSet(unittest.TestCase): @@ -30,6 +32,18 @@ def test_ctor_defaults(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) self.assertIs(streamed._response_iterator, iterator) + self.assertIsNone(streamed._source) + self.assertEqual(streamed.rows, []) + self.assertIsNone(streamed.metadata) + self.assertIsNone(streamed.stats) + self.assertIsNone(streamed.resume_token) + + def test_ctor_w_source(self): + iterator = _MockCancellableIterator() + source = object() + streamed = self._make_one(iterator, source=source) + self.assertIs(streamed._response_iterator, iterator) + self.assertIs(streamed._source, source) self.assertEqual(streamed.rows, []) self.assertIsNone(streamed.metadata) self.assertIsNone(streamed.stats) @@ -42,14 +56,14 @@ def test_fields_unset(self): _ = streamed.fields @staticmethod - def _makeScalarField(name, type_): + def _make_scalar_field(name, type_): from google.cloud.proto.spanner.v1.type_pb2 import StructType from google.cloud.proto.spanner.v1.type_pb2 import Type return StructType.Field(name=name, type=Type(code=type_)) @staticmethod - def _makeArrayField(name, element_type_code=None, element_type=None): + def _make_array_field(name, element_type_code=None, element_type=None): from google.cloud.proto.spanner.v1.type_pb2 import StructType from google.cloud.proto.spanner.v1.type_pb2 import Type @@ -60,7 +74,7 @@ def _makeArrayField(name, element_type_code=None, element_type=None): return StructType.Field(name=name, type=array_type) @staticmethod - def _makeStructType(struct_type_fields): + def _make_struct_type(struct_type_fields): from google.cloud.proto.spanner.v1.type_pb2 import StructType from google.cloud.proto.spanner.v1.type_pb2 import Type @@ -72,13 +86,13 @@ def _makeStructType(struct_type_fields): return Type(code='STRUCT', struct_type=struct_type) @staticmethod - def _makeValue(value): + def _make_value(value): from google.cloud.spanner._helpers import _make_value_pb return _make_value_pb(value) @staticmethod - def _makeListValue(values=(), value_pbs=None): + def _make_list_value(values=(), value_pbs=None): from google.protobuf.struct_pb2 import ListValue from google.protobuf.struct_pb2 import Value from google.cloud.spanner._helpers import _make_list_value_pb @@ -87,15 +101,52 @@ def _makeListValue(values=(), value_pbs=None): return Value(list_value=ListValue(values=value_pbs)) return Value(list_value=_make_list_value_pb(values)) + @staticmethod + def _make_result_set_metadata(fields=(), transaction_id=None): + from google.cloud.proto.spanner.v1.result_set_pb2 import ( + ResultSetMetadata) + metadata = ResultSetMetadata() + for field in fields: + metadata.row_type.fields.add().CopyFrom(field) + if transaction_id is not None: + metadata.transaction.id = transaction_id + return metadata + + @staticmethod + def _make_result_set_stats(query_plan=None, **kw): + from google.cloud.proto.spanner.v1.result_set_pb2 import ( + ResultSetStats) + from google.protobuf.struct_pb2 import Struct + from 
google.cloud.spanner._helpers import _make_value_pb + + query_stats = Struct(fields={ + key: _make_value_pb(value) for key, value in kw.items()}) + return ResultSetStats( + query_plan=query_plan, + query_stats=query_stats, + ) + + @staticmethod + def _make_partial_result_set( + values, metadata=None, stats=None, chunked_value=False): + from google.cloud.proto.spanner.v1.result_set_pb2 import ( + PartialResultSet) + return PartialResultSet( + values=values, + metadata=metadata, + stats=stats, + chunked_value=chunked_value, + ) + def test_properties_set(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), ] - metadata = streamed._metadata = _ResultSetMetadataPB(FIELDS) - stats = streamed._stats = _ResultSetStatsPB() + metadata = streamed._metadata = self._make_result_set_metadata(FIELDS) + stats = streamed._stats = self._make_result_set_stats() self.assertEqual(list(streamed.fields), FIELDS) self.assertIs(streamed.metadata, metadata) self.assertIs(streamed.stats, stats) @@ -106,11 +157,11 @@ def test__merge_chunk_bool(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('registered_voter', 'BOOL'), + self._make_scalar_field('registered_voter', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(True) - chunk = self._makeValue(False) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(True) + chunk = self._make_value(False) with self.assertRaises(Unmergeable): streamed._merge_chunk(chunk) @@ -119,11 +170,11 @@ def test__merge_chunk_int64(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('age', 'INT64'), + self._make_scalar_field('age', 'INT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(42) - chunk = self._makeValue(13) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(42) + chunk = self._make_value(13) merged = streamed._merge_chunk(chunk) self.assertEqual(merged.string_value, '4213') @@ -133,11 +184,11 @@ def test__merge_chunk_float64_nan_string(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('weight', 'FLOAT64'), + self._make_scalar_field('weight', 'FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'Na') - chunk = self._makeValue(u'N') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'Na') + chunk = self._make_value(u'N') merged = streamed._merge_chunk(chunk) self.assertEqual(merged.string_value, u'NaN') @@ -146,11 +197,11 @@ def test__merge_chunk_float64_w_empty(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('weight', 'FLOAT64'), + self._make_scalar_field('weight', 'FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(3.14159) - chunk = self._makeValue('') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(3.14159) + chunk = self._make_value('') merged = 
streamed._merge_chunk(chunk) self.assertEqual(merged.number_value, 3.14159) @@ -161,11 +212,11 @@ def test__merge_chunk_float64_w_float64(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('weight', 'FLOAT64'), + self._make_scalar_field('weight', 'FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(3.14159) - chunk = self._makeValue(2.71828) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(3.14159) + chunk = self._make_value(2.71828) with self.assertRaises(Unmergeable): streamed._merge_chunk(chunk) @@ -174,11 +225,11 @@ def test__merge_chunk_string(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('name', 'STRING'), + self._make_scalar_field('name', 'STRING'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'phred') - chunk = self._makeValue(u'wylma') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'phred') + chunk = self._make_value(u'wylma') merged = streamed._merge_chunk(chunk) @@ -189,11 +240,11 @@ def test__merge_chunk_string_w_bytes(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('image', 'BYTES'), + self._make_scalar_field('image', 'BYTES'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAAAAAA6fptVAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA\n') - chunk = self._makeValue(u'B3RJTUUH4QQGFwsBTL3HMwAAABJpVFh0Q29tbWVudAAAAAAAU0FNUExFMG3E+AAAAApJREFUCNdj\nYAAAAAIAAeIhvDMAAAAASUVORK5CYII=\n') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAAAAAA6fptVAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA\n') + chunk = self._make_value(u'B3RJTUUH4QQGFwsBTL3HMwAAABJpVFh0Q29tbWVudAAAAAAAU0FNUExFMG3E+AAAAApJREFUCNdj\nYAAAAAIAAeIhvDMAAAAASUVORK5CYII=\n') merged = streamed._merge_chunk(chunk) @@ -204,15 +255,15 @@ def test__merge_chunk_array_of_bool(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='BOOL'), + self._make_array_field('name', element_type_code='BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([True, True]) - chunk = self._makeListValue([False, False, False]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([True, True]) + chunk = self._make_list_value([False, False, False]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([True, True, False, False, False]) + expected = self._make_list_value([True, True, False, False, False]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -220,15 +271,15 @@ def test__merge_chunk_array_of_int(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='INT64'), + self._make_array_field('name', element_type_code='INT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([0, 1, 2]) - chunk = self._makeListValue([3, 4, 5]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + 
streamed._pending_chunk = self._make_list_value([0, 1, 2]) + chunk = self._make_list_value([3, 4, 5]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([0, 1, 23, 4, 5]) + expected = self._make_list_value([0, 1, 23, 4, 5]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -242,15 +293,15 @@ def test__merge_chunk_array_of_float(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='FLOAT64'), + self._make_array_field('name', element_type_code='FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([PI, SQRT_2]) - chunk = self._makeListValue(['', EULER, LOG_10]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([PI, SQRT_2]) + chunk = self._make_list_value(['', EULER, LOG_10]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([PI, SQRT_2, EULER, LOG_10]) + expected = self._make_list_value([PI, SQRT_2, EULER, LOG_10]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -258,15 +309,15 @@ def test__merge_chunk_array_of_string(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='STRING'), + self._make_array_field('name', element_type_code='STRING'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([u'A', u'B', u'C']) - chunk = self._makeListValue([None, u'D', u'E']) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([u'A', u'B', u'C']) + chunk = self._make_list_value([None, u'D', u'E']) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([u'A', u'B', u'C', None, u'D', u'E']) + expected = self._make_list_value([u'A', u'B', u'C', None, u'D', u'E']) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -274,15 +325,15 @@ def test__merge_chunk_array_of_string_with_null(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='STRING'), + self._make_array_field('name', element_type_code='STRING'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([u'A', u'B', u'C']) - chunk = self._makeListValue([u'D', u'E']) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([u'A', u'B', u'C']) + chunk = self._make_list_value([u'D', u'E']) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([u'A', u'B', u'CD', u'E']) + expected = self._make_list_value([u'A', u'B', u'CD', u'E']) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -298,22 +349,22 @@ def test__merge_chunk_array_of_array_of_int(self): FIELDS = [ StructType.Field(name='loloi', type=array_type) ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue(value_pbs=[ - self._makeListValue([0, 1]), - self._makeListValue([2]), + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value(value_pbs=[ + self._make_list_value([0, 1]), + self._make_list_value([2]), ]) - chunk = self._makeListValue(value_pbs=[ - self._makeListValue([3]), - self._makeListValue([4, 5]), + chunk = 
self._make_list_value(value_pbs=[ + self._make_list_value([3]), + self._make_list_value([4, 5]), ]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue(value_pbs=[ - self._makeListValue([0, 1]), - self._makeListValue([23]), - self._makeListValue([4, 5]), + expected = self._make_list_value(value_pbs=[ + self._make_list_value([0, 1]), + self._make_list_value([23]), + self._make_list_value([4, 5]), ]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -330,22 +381,22 @@ def test__merge_chunk_array_of_array_of_string(self): FIELDS = [ StructType.Field(name='lolos', type=array_type) ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue(value_pbs=[ - self._makeListValue([u'A', u'B']), - self._makeListValue([u'C']), + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value(value_pbs=[ + self._make_list_value([u'A', u'B']), + self._make_list_value([u'C']), ]) - chunk = self._makeListValue(value_pbs=[ - self._makeListValue([u'D']), - self._makeListValue([u'E', u'F']), + chunk = self._make_list_value(value_pbs=[ + self._make_list_value([u'D']), + self._make_list_value([u'E', u'F']), ]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue(value_pbs=[ - self._makeListValue([u'A', u'B']), - self._makeListValue([u'CD']), - self._makeListValue([u'E', u'F']), + expected = self._make_list_value(value_pbs=[ + self._make_list_value([u'A', u'B']), + self._make_list_value([u'CD']), + self._make_list_value([u'E', u'F']), ]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -353,47 +404,47 @@ def test__merge_chunk_array_of_array_of_string(self): def test__merge_chunk_array_of_struct(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) - struct_type = self._makeStructType([ + struct_type = self._make_struct_type([ ('name', 'STRING'), ('age', 'INT64'), ]) FIELDS = [ - self._makeArrayField('test', element_type=struct_type), + self._make_array_field('test', element_type=struct_type), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - partial = self._makeListValue([u'Phred ']) - streamed._pending_chunk = self._makeListValue(value_pbs=[partial]) - rest = self._makeListValue([u'Phlyntstone', 31]) - chunk = self._makeListValue(value_pbs=[rest]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + partial = self._make_list_value([u'Phred ']) + streamed._pending_chunk = self._make_list_value(value_pbs=[partial]) + rest = self._make_list_value([u'Phlyntstone', 31]) + chunk = self._make_list_value(value_pbs=[rest]) merged = streamed._merge_chunk(chunk) - struct = self._makeListValue([u'Phred Phlyntstone', 31]) - expected = self._makeListValue(value_pbs=[struct]) + struct = self._make_list_value([u'Phred Phlyntstone', 31]) + expected = self._make_list_value(value_pbs=[struct]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) def test__merge_chunk_array_of_struct_unmergeable(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) - struct_type = self._makeStructType([ + struct_type = self._make_struct_type([ ('name', 'STRING'), ('registered', 'BOOL'), ('voted', 'BOOL'), ]) FIELDS = [ - self._makeArrayField('test', element_type=struct_type), + self._make_array_field('test', element_type=struct_type), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - partial = self._makeListValue([u'Phred Phlyntstone', True]) - 
streamed._pending_chunk = self._makeListValue(value_pbs=[partial]) - rest = self._makeListValue([True]) - chunk = self._makeListValue(value_pbs=[rest]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + partial = self._make_list_value([u'Phred Phlyntstone', True]) + streamed._pending_chunk = self._make_list_value(value_pbs=[partial]) + rest = self._make_list_value([True]) + chunk = self._make_list_value(value_pbs=[rest]) merged = streamed._merge_chunk(chunk) - struct = self._makeListValue([u'Phred Phlyntstone', True, True]) - expected = self._makeListValue(value_pbs=[struct]) + struct = self._make_list_value([u'Phred Phlyntstone', True, True]) + expected = self._make_list_value(value_pbs=[struct]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -401,11 +452,11 @@ def test_merge_values_empty_and_empty(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) streamed._current_row = [] streamed._merge_values([]) self.assertEqual(streamed.rows, []) @@ -415,13 +466,13 @@ def test_merge_values_empty_and_partial(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] + VALUES = [self._make_value(bare) for bare in BARE] streamed._current_row = [] streamed._merge_values(VALUES) self.assertEqual(streamed.rows, []) @@ -431,13 +482,13 @@ def test_merge_values_empty_and_filled(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42, True] - VALUES = [self._makeValue(bare) for bare in BARE] + VALUES = [self._make_value(bare) for bare in BARE] streamed._current_row = [] streamed._merge_values(VALUES) self.assertEqual(streamed.rows, [BARE]) @@ -447,17 +498,17 @@ def test_merge_values_empty_and_filled_plus(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BARE = [ u'Phred 
Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', ] - VALUES = [self._makeValue(bare) for bare in BARE] + VALUES = [self._make_value(bare) for bare in BARE] streamed._current_row = [] streamed._merge_values(VALUES) self.assertEqual(streamed.rows, [BARE[0:3], BARE[3:6]]) @@ -467,11 +518,11 @@ def test_merge_values_partial_and_empty(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [ u'Phred Phlyntstone' ] @@ -484,15 +535,15 @@ def test_merge_values_partial_and_partial(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [u'Phred Phlyntstone'] streamed._current_row[:] = BEFORE MERGED = [42] - TO_MERGE = [self._makeValue(item) for item in MERGED] + TO_MERGE = [self._make_value(item) for item in MERGED] streamed._merge_values(TO_MERGE) self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BEFORE + MERGED) @@ -501,17 +552,17 @@ def test_merge_values_partial_and_filled(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [ u'Phred Phlyntstone' ] streamed._current_row[:] = BEFORE MERGED = [42, True] - TO_MERGE = [self._makeValue(item) for item in MERGED] + TO_MERGE = [self._make_value(item) for item in MERGED] streamed._merge_values(TO_MERGE) self.assertEqual(streamed.rows, [BEFORE + MERGED]) self.assertEqual(streamed._current_row, []) @@ -520,13 +571,13 @@ def test_merge_values_partial_and_filled_plus(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [ - self._makeValue(u'Phred Phlyntstone') + self._make_value(u'Phred Phlyntstone') ] streamed._current_row[:] = BEFORE MERGED = [ @@ -534,7 +585,7 @@ def test_merge_values_partial_and_filled_plus(self): u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', ] - TO_MERGE = [self._makeValue(item) for item in MERGED] + TO_MERGE = [self._make_value(item) for item in MERGED] VALUES = BEFORE + 
MERGED streamed._merge_values(TO_MERGE) self.assertEqual(streamed.rows, [VALUES[0:3], VALUES[3:6]]) @@ -547,36 +598,62 @@ def test_consume_next_empty(self): streamed.consume_next() def test_consume_next_first_set_partial(self): + TXN_ID = b'DEADBEEF' FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata( + FIELDS, transaction_id=TXN_ID) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, metadata=metadata) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) iterator = _MockCancellableIterator(result_set) - streamed = self._make_one(iterator) + source = mock.Mock(_transaction_id=None, spec=['_transaction_id']) + streamed = self._make_one(iterator, source=source) streamed.consume_next() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertTrue(streamed.metadata is metadata) + self.assertEqual(streamed.metadata, metadata) self.assertEqual(streamed.resume_token, result_set.resume_token) + self.assertEqual(source._transaction_id, TXN_ID) + + def test_consume_next_first_set_partial_existing_txn_id(self): + TXN_ID = b'DEADBEEF' + FIELDS = [ + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), + ] + metadata = self._make_result_set_metadata( + FIELDS, transaction_id=b'') + BARE = [u'Phred Phlyntstone', 42] + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) + iterator = _MockCancellableIterator(result_set) + source = mock.Mock(_transaction_id=TXN_ID, spec=['_transaction_id']) + streamed = self._make_one(iterator, source=source) + streamed.consume_next() + self.assertEqual(streamed.rows, []) + self.assertEqual(streamed._current_row, BARE) + self.assertEqual(streamed.metadata, metadata) + self.assertEqual(streamed.resume_token, result_set.resume_token) + self.assertEqual(source._transaction_id, TXN_ID) def test_consume_next_w_partial_result(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] VALUES = [ - self._makeValue(u'Phred '), + self._make_value(u'Phred '), ] - result_set = _PartialResultSetPB(VALUES, chunked_value=True) + result_set = self._make_partial_result_set(VALUES, chunked_value=True) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) streamed.consume_next() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, []) @@ -585,21 +662,21 @@ def test_consume_next_w_partial_result(self): def test_consume_next_w_pending_chunk(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + 
self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] BARE = [ u'Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'Phred ') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'Phred ') streamed.consume_next() self.assertEqual(streamed.rows, [ [u'Phred Phlyntstone', BARE[1], BARE[2]], @@ -611,26 +688,26 @@ def test_consume_next_w_pending_chunk(self): def test_consume_next_last_set(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) - stats = _ResultSetStatsPB( + metadata = self._make_result_set_metadata(FIELDS) + stats = self._make_result_set_stats( rows_returned="1", elapsed_time="1.23 secs", - cpu_tme="0.98 secs", + cpu_time="0.98 secs", ) BARE = [u'Phred Phlyntstone', 42, True] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, stats=stats) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, stats=stats) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) streamed._metadata = metadata streamed.consume_next() self.assertEqual(streamed.rows, [BARE]) self.assertEqual(streamed._current_row, []) - self.assertTrue(streamed._stats is stats) + self.assertEqual(streamed._stats, stats) self.assertEqual(streamed.resume_token, result_set.resume_token) def test_consume_all_empty(self): @@ -640,36 +717,37 @@ def test_consume_all_empty(self): def test_consume_all_one_result_set_partial(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, metadata=metadata) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) streamed.consume_all() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertTrue(streamed.metadata is metadata) + self.assertEqual(streamed.metadata, metadata) def test_consume_all_multiple_result_sets_filled(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = 
self._make_result_set_metadata(FIELDS) BARE = [ u'Phred Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', 41, True, ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set1 = _PartialResultSetPB(VALUES[:4], metadata=metadata) - result_set2 = _PartialResultSetPB(VALUES[4:]) + VALUES = [self._make_value(bare) for bare in BARE] + result_set1 = self._make_partial_result_set( + VALUES[:4], metadata=metadata) + result_set2 = self._make_partial_result_set(VALUES[4:]) iterator = _MockCancellableIterator(result_set1, result_set2) streamed = self._make_one(iterator) streamed.consume_all() @@ -689,37 +767,38 @@ def test___iter___empty(self): def test___iter___one_result_set_partial(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, metadata=metadata) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) found = list(streamed) self.assertEqual(found, []) self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertTrue(streamed.metadata is metadata) + self.assertEqual(streamed.metadata, metadata) def test___iter___multiple_result_sets_filled(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [ u'Phred Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', 41, True, ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set1 = _PartialResultSetPB(VALUES[:4], metadata=metadata) - result_set2 = _PartialResultSetPB(VALUES[4:]) + VALUES = [self._make_value(bare) for bare in BARE] + result_set1 = self._make_partial_result_set( + VALUES[:4], metadata=metadata) + result_set2 = self._make_partial_result_set(VALUES[4:]) iterator = _MockCancellableIterator(result_set1, result_set2) streamed = self._make_one(iterator) found = list(streamed) @@ -734,11 +813,11 @@ def test___iter___multiple_result_sets_filled(self): def test___iter___w_existing_rows_read(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) ALREADY = [ [u'Pebbylz Phlyntstone', 4, False], [u'Dino Rhubble', 4, False], @@ -748,9 +827,10 @@ def test___iter___w_existing_rows_read(self): u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', 41, True, ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set1 = _PartialResultSetPB(VALUES[:4], metadata=metadata) - result_set2 = 
_PartialResultSetPB(VALUES[4:]) + VALUES = [self._make_value(bare) for bare in BARE] + result_set1 = self._make_partial_result_set( + VALUES[:4], metadata=metadata) + result_set2 = self._make_partial_result_set(VALUES[4:]) iterator = _MockCancellableIterator(result_set1, result_set2) streamed = self._make_one(iterator) streamed._rows[:] = ALREADY @@ -779,40 +859,6 @@ def __next__(self): # pragma: NO COVER Py3k return self.next() -class _ResultSetMetadataPB(object): - - def __init__(self, fields): - from google.cloud.proto.spanner.v1.type_pb2 import StructType - - self.row_type = StructType(fields=fields) - - -class _ResultSetStatsPB(object): - - def __init__(self, query_plan=None, **query_stats): - from google.protobuf.struct_pb2 import Struct - from google.cloud.spanner._helpers import _make_value_pb - - self.query_plan = query_plan - self.query_stats = Struct(fields={ - key: _make_value_pb(value) for key, value in query_stats.items()}) - - -class _PartialResultSetPB(object): - - resume_token = b'DEADBEEF' - - def __init__(self, values, metadata=None, stats=None, chunked_value=False): - self.values = values - self.metadata = metadata - self.stats = stats - self.chunked_value = chunked_value - - def HasField(self, name): - assert name == 'stats' - return self.stats is not None - - class TestStreamedResultSet_JSON_acceptance_tests(unittest.TestCase): _json_tests = None diff --git a/spanner/tests/unit/test_transaction.py b/spanner/tests/unit/test_transaction.py index bdb8d20b8f01..9bb36d1f5435 100644 --- a/spanner/tests/unit/test_transaction.py +++ b/spanner/tests/unit/test_transaction.py @@ -42,16 +42,19 @@ def _getTargetClass(self): return Transaction - def _make_one(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + def _make_one(self, session, *args, **kwargs): + transaction = self._getTargetClass()(session, *args, **kwargs) + session._transaction = transaction + return transaction def test_ctor_defaults(self): session = _Session() transaction = self._make_one(session) - self.assertTrue(transaction._session is session) - self.assertIsNone(transaction._id) + self.assertIs(transaction._session, session) + self.assertIsNone(transaction._transaction_id) self.assertIsNone(transaction.committed) - self.assertEqual(transaction._rolled_back, False) + self.assertFalse(transaction._rolled_back) + self.assertTrue(transaction._multi_use) def test__check_state_not_begun(self): session = _Session() @@ -62,7 +65,7 @@ def test__check_state_not_begun(self): def test__check_state_already_committed(self): session = _Session() transaction = self._make_one(session) - transaction._id = b'DEADBEEF' + transaction._transaction_id = self.TRANSACTION_ID transaction.committed = object() with self.assertRaises(ValueError): transaction._check_state() @@ -70,7 +73,7 @@ def test__check_state_already_committed(self): def test__check_state_already_rolled_back(self): session = _Session() transaction = self._make_one(session) - transaction._id = b'DEADBEEF' + transaction._transaction_id = self.TRANSACTION_ID transaction._rolled_back = True with self.assertRaises(ValueError): transaction._check_state() @@ -78,20 +81,20 @@ def test__check_state_already_rolled_back(self): def test__check_state_ok(self): session = _Session() transaction = self._make_one(session) - transaction._id = b'DEADBEEF' + transaction._transaction_id = self.TRANSACTION_ID transaction._check_state() # does not raise def test__make_txn_selector(self): session = _Session() transaction = self._make_one(session) - transaction._id = 
self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID selector = transaction._make_txn_selector() self.assertEqual(selector.id, self.TRANSACTION_ID) def test_begin_already_begun(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID with self.assertRaises(ValueError): transaction.begin() @@ -141,7 +144,7 @@ def test_begin_ok(self): txn_id = transaction.begin() self.assertEqual(txn_id, self.TRANSACTION_ID) - self.assertEqual(transaction._id, self.TRANSACTION_ID) + self.assertEqual(transaction._transaction_id, self.TRANSACTION_ID) session_id, txn_options, options = api._begun self.assertEqual(session_id, session.name) @@ -158,7 +161,7 @@ def test_rollback_not_begun(self): def test_rollback_already_committed(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.committed = object() with self.assertRaises(ValueError): transaction.rollback() @@ -166,7 +169,7 @@ def test_rollback_already_committed(self): def test_rollback_already_rolled_back(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction._rolled_back = True with self.assertRaises(ValueError): transaction.rollback() @@ -179,7 +182,7 @@ def test_rollback_w_gax_error(self): _random_gax_error=True) session = _Session(database) transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.insert(TABLE_NAME, COLUMNS, VALUES) with self.assertRaises(GaxError): @@ -202,12 +205,13 @@ def test_rollback_ok(self): _rollback_response=empty_pb) session = _Session(database) transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.replace(TABLE_NAME, COLUMNS, VALUES) transaction.rollback() self.assertTrue(transaction._rolled_back) + self.assertIsNone(session._transaction) session_id, txn_id, options = api._rolled_back self.assertEqual(session_id, session.name) @@ -224,7 +228,7 @@ def test_commit_not_begun(self): def test_commit_already_committed(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.committed = object() with self.assertRaises(ValueError): transaction.commit() @@ -232,7 +236,7 @@ def test_commit_already_committed(self): def test_commit_already_rolled_back(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction._rolled_back = True with self.assertRaises(ValueError): transaction.commit() @@ -240,7 +244,7 @@ def test_commit_already_rolled_back(self): def test_commit_no_mutations(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID with self.assertRaises(ValueError): transaction.commit() @@ -252,7 +256,7 @@ def test_commit_w_gax_error(self): _random_gax_error=True) session = _Session(database) transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.replace(TABLE_NAME, COLUMNS, VALUES) with self.assertRaises(GaxError): @@ 
-284,12 +288,13 @@ def test_commit_ok(self): _commit_response=response) session = _Session(database) transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.delete(TABLE_NAME, keyset) transaction.commit() self.assertEqual(transaction.committed, now) + self.assertIsNone(session._transaction) session_id, mutations, txn_id, options = api._committed self.assertEqual(session_id, session.name) @@ -368,6 +373,8 @@ class _Database(object): class _Session(object): + _transaction = None + def __init__(self, database=None, name=TestTransaction.SESSION_NAME): self._database = database self.name = name diff --git a/speech/MANIFEST.in b/speech/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/speech/MANIFEST.in +++ b/speech/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/speech/README.rst b/speech/README.rst index aeec14e484a3..150fc37bb590 100644 --- a/speech/README.rst +++ b/speech/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Speech - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -41,8 +41,8 @@ and receive a text transcription from the Cloud Speech API service. See the ``google-cloud-python`` API `speech documentation`_ to learn how to connect to the Google Cloud Speech API using this Client Library. -.. _speech documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech-usage.html +.. _speech documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-speech.svg - :target: https://pypi.python.org/pypi/google-cloud-speech + :target: https://pypi.org/project/google-cloud-speech/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-speech.svg - :target: https://pypi.python.org/pypi/google-cloud-speech + :target: https://pypi.org/project/google-cloud-speech/ diff --git a/speech/google/cloud/gapic/__init__.py b/speech/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/gapic/speech/__init__.py b/speech/google/cloud/gapic/speech/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/gapic/speech/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/gapic/speech/v1/__init__.py b/speech/google/cloud/gapic/speech/v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/speech/google/cloud/gapic/speech/v1/enums.py b/speech/google/cloud/gapic/speech/v1/enums.py new file mode 100644 index 000000000000..98379c7078a9 --- /dev/null +++ b/speech/google/cloud/gapic/speech/v1/enums.py @@ -0,0 +1,86 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class RecognitionConfig(object): + class AudioEncoding(object): + """ + Audio encoding of the data sent in the audio message. All encodings support + only 1 channel (mono) audio. Only ``FLAC`` includes a header that describes + the bytes of audio that follow the header. The other encodings are raw + audio bytes with no header. + + For best results, the audio source should be captured and transmitted using + a lossless encoding (``FLAC`` or ``LINEAR16``). Recognition accuracy may be + reduced if lossy codecs, which include the other codecs listed in + this section, are used to capture or transmit the audio, particularly if + background noise is present. + + Attributes: + ENCODING_UNSPECIFIED (int): Not specified. Will return result ``google.rpc.Code.INVALID_ARGUMENT``. + LINEAR16 (int): Uncompressed 16-bit signed little-endian samples (Linear PCM). + FLAC (int): ```FLAC`` <https://xiph.org/flac/documentation.html>`_ (Free Lossless Audio + Codec) is the recommended encoding because it is + lossless--therefore recognition is not compromised--and + requires only about half the bandwidth of ``LINEAR16``. ``FLAC`` stream + encoding supports 16-bit and 24-bit samples, however, not all fields in + ``STREAMINFO`` are supported. + MULAW (int): 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + AMR (int): Adaptive Multi-Rate Narrowband codec. ``sample_rate_hertz`` must be 8000. + AMR_WB (int): Adaptive Multi-Rate Wideband codec. ``sample_rate_hertz`` must be 16000. + OGG_OPUS (int): Opus encoded audio frames in Ogg container + (`OggOpus <https://wiki.xiph.org/OggOpus>`_). + ``sample_rate_hertz`` must be 16000. 
+ SPEEX_WITH_HEADER_BYTE (int): Although the use of lossy encodings is not recommended, if a very low + bitrate encoding is required, ``OGG_OPUS`` is highly preferred over + Speex encoding. The `Speex <https://speex.org/>`_ encoding supported by + Cloud Speech API has a header byte in each block, as in MIME type + ``audio/x-speex-with-header-byte``. + It is a variant of the RTP Speex encoding defined in + `RFC 5574 <https://tools.ietf.org/html/rfc5574>`_. + The stream is a sequence of blocks, one block per RTP packet. Each block + starts with a byte containing the length of the block, in bytes, followed + by one or more frames of Speex data, padded to an integral number of + bytes (octets) as specified in RFC 5574. In other words, each RTP header + is replaced with a single byte containing the block length. Only Speex + wideband is supported. ``sample_rate_hertz`` must be 16000. + """ + ENCODING_UNSPECIFIED = 0 + LINEAR16 = 1 + FLAC = 2 + MULAW = 3 + AMR = 4 + AMR_WB = 5 + OGG_OPUS = 6 + SPEEX_WITH_HEADER_BYTE = 7 + + +class StreamingRecognizeResponse(object): + class SpeechEventType(object): + """ + Indicates the type of speech event. + + Attributes: + SPEECH_EVENT_UNSPECIFIED (int): No speech event specified. + END_OF_SINGLE_UTTERANCE (int): This event indicates that the server has detected the end of the user's + speech utterance and expects no additional speech. Therefore, the server + will not process additional audio (although it may subsequently return + additional results). The client should stop sending additional audio + data, half-close the gRPC connection, and wait for any additional results + until the server closes the gRPC connection. This event is only sent if + ``single_utterance`` was set to ``true``, and is not used otherwise. + """ + SPEECH_EVENT_UNSPECIFIED = 0 + END_OF_SINGLE_UTTERANCE = 1 diff --git a/speech/google/cloud/gapic/speech/v1/speech_client.py b/speech/google/cloud/gapic/speech/v1/speech_client.py new file mode 100644 index 000000000000..3806330b25bb --- /dev/null +++ b/speech/google/cloud/gapic/speech/v1/speech_client.py @@ -0,0 +1,285 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/speech/v1/cloud_speech.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. 
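+# A hedged usage sketch (kept in comments so the generated source is
+# unchanged) combining the enums module added above with the proto messages
+# this client consumes; module paths and constants are taken from this diff,
+# while the sample rate and language code are arbitrary illustrative values:
+#
+#     from google.cloud.gapic.speech.v1 import enums
+#     from google.cloud.proto.speech.v1 import cloud_speech_pb2
+#
+#     # Enum attributes are plain ints (LINEAR16 == 1, FLAC == 2); FLAC and
+#     # LINEAR16 are the lossless choices, AMR requires 8000 Hz, and AMR_WB,
+#     # OGG_OPUS and SPEEX_WITH_HEADER_BYTE require 16000 Hz.
+#     config = cloud_speech_pb2.RecognitionConfig(
+#         encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,
+#         sample_rate_hertz=16000,
+#         language_code='en-US')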
+"""Accesses the google.cloud.speech.v1 Speech API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gapic.longrunning import operations_client +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +from google.gax.utils import oneof +import google.gax + +from google.cloud.gapic.speech.v1 import enums +from google.cloud.proto.speech.v1 import cloud_speech_pb2 + + +class SpeechClient(object): + """Service that implements Google Cloud Speech API.""" + + SERVICE_ADDRESS = 'speech.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A SpeechClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. 
+ metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-speech', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'speech_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.cloud.speech.v1.Speech', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, ) + self.speech_stub = config.create_stub( + cloud_speech_pb2.SpeechStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self.operations_client = operations_client.OperationsClient( + service_path=service_path, + port=port, + channel=channel, + credentials=credentials, + ssl_credentials=ssl_credentials, + scopes=scopes, + client_config=client_config, + metrics_headers=metrics_headers, ) + + self._recognize = api_callable.create_api_call( + self.speech_stub.Recognize, settings=defaults['recognize']) + self._long_running_recognize = api_callable.create_api_call( + self.speech_stub.LongRunningRecognize, + settings=defaults['long_running_recognize']) + self._streaming_recognize = api_callable.create_api_call( + self.speech_stub.StreamingRecognize, + settings=defaults['streaming_recognize']) + + # Service calls + def recognize(self, config, audio, options=None): + """ + Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + + Example: + >>> from google.cloud.gapic.speech.v1 import speech_client + >>> from google.cloud.gapic.speech.v1 import enums + >>> from google.cloud.proto.speech.v1 import cloud_speech_pb2 + >>> client = speech_client.SpeechClient() + >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC + >>> sample_rate_hertz = 44100 + >>> language_code = 'en-US' + >>> config = cloud_speech_pb2.RecognitionConfig(encoding=encoding, sample_rate_hertz=sample_rate_hertz, language_code=language_code) + >>> uri = 'gs://bucket_name/file_name.flac' + >>> audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + >>> response = client.recognize(config, audio) + + Args: + config (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionConfig`): *Required* Provides information to the recognizer that specifies how to + process the request. + audio (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionAudio`): *Required* The audio data to be recognized. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognizeResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = cloud_speech_pb2.RecognizeRequest(config=config, audio=audio) + return self._recognize(request, options) + + def long_running_recognize(self, config, audio, options=None): + """ + Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + ``Operation.error`` or an ``Operation.response`` which contains + a ``LongRunningRecognizeResponse`` message. 
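+
+ The returned :class:`google.gax._OperationFuture` may also be waited on
+ directly rather than through a callback; a minimal blocking sketch,
+ reusing the same hypothetical ``config`` and ``audio`` objects as the
+ example below:
+
+ >>> operation = client.long_running_recognize(config, audio)
+ >>> response = operation.result() # blocks until the operation completes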
+ + Example: + >>> from google.cloud.gapic.speech.v1 import speech_client + >>> from google.cloud.gapic.speech.v1 import enums + >>> from google.cloud.proto.speech.v1 import cloud_speech_pb2 + >>> client = speech_client.SpeechClient() + >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC + >>> sample_rate_hertz = 44100 + >>> language_code = 'en-US' + >>> config = cloud_speech_pb2.RecognitionConfig(encoding=encoding, sample_rate_hertz=sample_rate_hertz, language_code=language_code) + >>> uri = 'gs://bucket_name/file_name.flac' + >>> audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + >>> response = client.long_running_recognize(config, audio) + >>> + >>> def callback(operation_future): + >>> # Handle result. + >>> result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + config (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionConfig`): *Required* Provides information to the recognizer that specifies how to + process the request. + audio (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionAudio`): *Required* The audio data to be recognized. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax._OperationFuture` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = cloud_speech_pb2.LongRunningRecognizeRequest( + config=config, audio=audio) + return google.gax._OperationFuture( + self._long_running_recognize(request, + options), self.operations_client, + cloud_speech_pb2.LongRunningRecognizeResponse, + cloud_speech_pb2.LongRunningRecognizeMetadata, options) + + def streaming_recognize(self, requests, options=None): + """ + Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + + EXPERIMENTAL: This method interface might change in the future. + + Example: + >>> from google.cloud.gapic.speech.v1 import speech_client + >>> from google.cloud.proto.speech.v1 import cloud_speech_pb2 + >>> client = speech_client.SpeechClient() + >>> request = cloud_speech_pb2.StreamingRecognizeRequest() + >>> requests = [request] + >>> for element in client.streaming_recognize(requests): + >>> # process element + >>> pass + + Args: + requests (iterator[:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.StreamingRecognizeRequest`]): The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + iterator[:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.StreamingRecognizeResponse`]. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
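+
+ A hedged sketch of building the ``requests`` iterator, assuming the usual
+ streaming protocol in which the first request carries ``streaming_config``
+ and every later request carries raw ``audio_content`` bytes (``config`` is
+ a ``RecognitionConfig`` as in the examples above; the chunk source is a
+ placeholder):
+
+ >>> def request_stream(audio_chunks, config):
+ >>> yield cloud_speech_pb2.StreamingRecognizeRequest(
+ >>> streaming_config=cloud_speech_pb2.StreamingRecognitionConfig(
+ >>> config=config, interim_results=True))
+ >>> for chunk in audio_chunks:
+ >>> yield cloud_speech_pb2.StreamingRecognizeRequest(
+ >>> audio_content=chunk)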
+ """ + return self._streaming_recognize(requests, options) diff --git a/speech/google/cloud/gapic/speech/v1/speech_client_config.json b/speech/google/cloud/gapic/speech/v1/speech_client_config.json new file mode 100644 index 000000000000..4edd15ce865b --- /dev/null +++ b/speech/google/cloud/gapic/speech/v1/speech_client_config.json @@ -0,0 +1,43 @@ +{ + "interfaces": { + "google.cloud.speech.v1.Speech": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 190000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 190000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "Recognize": { + "timeout_millis": 190000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "LongRunningRecognize": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StreamingRecognize": { + "timeout_millis": 190000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/speech/google/cloud/proto/__init__.py b/speech/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/proto/speech/__init__.py b/speech/google/cloud/proto/speech/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/proto/speech/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/proto/speech/v1/__init__.py b/speech/google/cloud/proto/speech/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/speech/google/cloud/proto/speech/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py new file mode 100644 index 000000000000..29d73064b556 --- /dev/null +++ b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py @@ -0,0 +1,1331 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/speech/v1/cloud_speech.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/speech/v1/cloud_speech.proto', + package='google.cloud.speech.v1', + syntax='proto3', + serialized_pb=_b('\n/google/cloud/proto/speech/v1/cloud_speech.proto\x12\x16google.cloud.speech.v1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\x86\x01\n\x10RecognizeRequest\x12\x39\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfig\x12\x37\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudio\"\x91\x01\n\x1bLongRunningRecognizeRequest\x12\x39\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfig\x12\x37\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudio\"\x99\x01\n\x19StreamingRecognizeRequest\x12N\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request\"\x8a\x01\n\x1aStreamingRecognitionConfig\x12\x39\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfig\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 \x01(\x08\"\x92\x03\n\x11RecognitionConfig\x12I\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32\x37.google.cloud.speech.v1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x15\n\rlanguage_code\x18\x03 \x01(\t\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 \x01(\x08\x12>\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32%.google.cloud.speech.v1.SpeechContext\"\x8b\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07\" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t\"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source\"U\n\x11RecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult\"`\n\x1cLongRunningRecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult\"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 
\x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xb1\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x07results\x18\x02 \x03(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionResult\x12]\n\x11speech_event_type\x18\x04 \x01(\x0e\x32\x42.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType\"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01\"\x8d\x01\n\x1aStreamingRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\"e\n\x17SpeechRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\"F\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x32\xa6\x03\n\x06Speech\x12\x81\x01\n\tRecognize\x12(.google.cloud.speech.v1.RecognizeRequest\x1a).google.cloud.speech.v1.RecognizeResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\"\x14/v1/speech:recognize:\x01*\x12\x96\x01\n\x14LongRunningRecognize\x12\x33.google.cloud.speech.v1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/speech:longrunningrecognize:\x01*\x12\x7f\n\x12StreamingRecognize\x12\x31.google.cloud.speech.v1.StreamingRecognizeRequest\x1a\x32.google.cloud.speech.v1.StreamingRecognizeResponse(\x01\x30\x01\x42i\n\x1a\x63om.google.cloud.speech.v1B\x0bSpeechProtoP\x01Z<google.golang.org/genproto/googleapis/cloud/speech/v1;speechb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_RECOGNITIONCONFIG_AUDIOENCODING = _descriptor.EnumDescriptor( + name='AudioEncoding', + full_name='google.cloud.speech.v1.RecognitionConfig.AudioEncoding', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ENCODING_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LINEAR16', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLAC', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MULAW', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AMR', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AMR_WB', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OGG_OPUS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPEEX_WITH_HEADER_BYTE', index=7, number=7, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1105, + serialized_end=1244, +) +_sym_db.RegisterEnumDescriptor(_RECOGNITIONCONFIG_AUDIOENCODING) + +_STREAMINGRECOGNIZERESPONSE_SPEECHEVENTTYPE = _descriptor.EnumDescriptor( + name='SpeechEventType', + full_name='google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType', + filename=None, + 
file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEECH_EVENT_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='END_OF_SINGLE_UTTERANCE', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1926, + serialized_end=2002, +) +_sym_db.RegisterEnumDescriptor(_STREAMINGRECOGNIZERESPONSE_SPEECHEVENTTYPE) + + +_RECOGNIZEREQUEST = _descriptor.Descriptor( + name='RecognizeRequest', + full_name='google.cloud.speech.v1.RecognizeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='config', full_name='google.cloud.speech.v1.RecognizeRequest.config', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='audio', full_name='google.cloud.speech.v1.RecognizeRequest.audio', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=260, + serialized_end=394, +) + + +_LONGRUNNINGRECOGNIZEREQUEST = _descriptor.Descriptor( + name='LongRunningRecognizeRequest', + full_name='google.cloud.speech.v1.LongRunningRecognizeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='config', full_name='google.cloud.speech.v1.LongRunningRecognizeRequest.config', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='audio', full_name='google.cloud.speech.v1.LongRunningRecognizeRequest.audio', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=397, + serialized_end=542, +) + + +_STREAMINGRECOGNIZEREQUEST = _descriptor.Descriptor( + name='StreamingRecognizeRequest', + full_name='google.cloud.speech.v1.StreamingRecognizeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='streaming_config', full_name='google.cloud.speech.v1.StreamingRecognizeRequest.streaming_config', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='audio_content', full_name='google.cloud.speech.v1.StreamingRecognizeRequest.audio_content', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='streaming_request', full_name='google.cloud.speech.v1.StreamingRecognizeRequest.streaming_request', + index=0, containing_type=None, fields=[]), + ], + serialized_start=545, + serialized_end=698, +) + + +_STREAMINGRECOGNITIONCONFIG = _descriptor.Descriptor( + name='StreamingRecognitionConfig', + full_name='google.cloud.speech.v1.StreamingRecognitionConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='config', full_name='google.cloud.speech.v1.StreamingRecognitionConfig.config', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='single_utterance', full_name='google.cloud.speech.v1.StreamingRecognitionConfig.single_utterance', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='interim_results', full_name='google.cloud.speech.v1.StreamingRecognitionConfig.interim_results', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=701, + serialized_end=839, +) + + +_RECOGNITIONCONFIG = _descriptor.Descriptor( + name='RecognitionConfig', + full_name='google.cloud.speech.v1.RecognitionConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='encoding', full_name='google.cloud.speech.v1.RecognitionConfig.encoding', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sample_rate_hertz', full_name='google.cloud.speech.v1.RecognitionConfig.sample_rate_hertz', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language_code', full_name='google.cloud.speech.v1.RecognitionConfig.language_code', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_alternatives', full_name='google.cloud.speech.v1.RecognitionConfig.max_alternatives', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='profanity_filter', 
full_name='google.cloud.speech.v1.RecognitionConfig.profanity_filter', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='speech_contexts', full_name='google.cloud.speech.v1.RecognitionConfig.speech_contexts', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RECOGNITIONCONFIG_AUDIOENCODING, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=842, + serialized_end=1244, +) + + +_SPEECHCONTEXT = _descriptor.Descriptor( + name='SpeechContext', + full_name='google.cloud.speech.v1.SpeechContext', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='phrases', full_name='google.cloud.speech.v1.SpeechContext.phrases', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1246, + serialized_end=1278, +) + + +_RECOGNITIONAUDIO = _descriptor.Descriptor( + name='RecognitionAudio', + full_name='google.cloud.speech.v1.RecognitionAudio', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.speech.v1.RecognitionAudio.content', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uri', full_name='google.cloud.speech.v1.RecognitionAudio.uri', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='audio_source', full_name='google.cloud.speech.v1.RecognitionAudio.audio_source', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1280, + serialized_end=1348, +) + + +_RECOGNIZERESPONSE = _descriptor.Descriptor( + name='RecognizeResponse', + full_name='google.cloud.speech.v1.RecognizeResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='results', full_name='google.cloud.speech.v1.RecognizeResponse.results', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=1350, + serialized_end=1435, +) + + +_LONGRUNNINGRECOGNIZERESPONSE = _descriptor.Descriptor( + name='LongRunningRecognizeResponse', + full_name='google.cloud.speech.v1.LongRunningRecognizeResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='results', full_name='google.cloud.speech.v1.LongRunningRecognizeResponse.results', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1437, + serialized_end=1533, +) + + +_LONGRUNNINGRECOGNIZEMETADATA = _descriptor.Descriptor( + name='LongRunningRecognizeMetadata', + full_name='google.cloud.speech.v1.LongRunningRecognizeMetadata', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='progress_percent', full_name='google.cloud.speech.v1.LongRunningRecognizeMetadata.progress_percent', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_time', full_name='google.cloud.speech.v1.LongRunningRecognizeMetadata.start_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_update_time', full_name='google.cloud.speech.v1.LongRunningRecognizeMetadata.last_update_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1536, + serialized_end=1694, +) + + +_STREAMINGRECOGNIZERESPONSE = _descriptor.Descriptor( + name='StreamingRecognizeResponse', + full_name='google.cloud.speech.v1.StreamingRecognizeResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='error', full_name='google.cloud.speech.v1.StreamingRecognizeResponse.error', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='results', full_name='google.cloud.speech.v1.StreamingRecognizeResponse.results', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='speech_event_type', full_name='google.cloud.speech.v1.StreamingRecognizeResponse.speech_event_type', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _STREAMINGRECOGNIZERESPONSE_SPEECHEVENTTYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1697, + serialized_end=2002, +) + + +_STREAMINGRECOGNITIONRESULT = _descriptor.Descriptor( + name='StreamingRecognitionResult', + full_name='google.cloud.speech.v1.StreamingRecognitionResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='alternatives', full_name='google.cloud.speech.v1.StreamingRecognitionResult.alternatives', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='is_final', full_name='google.cloud.speech.v1.StreamingRecognitionResult.is_final', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stability', full_name='google.cloud.speech.v1.StreamingRecognitionResult.stability', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2005, + serialized_end=2146, +) + + +_SPEECHRECOGNITIONRESULT = _descriptor.Descriptor( + name='SpeechRecognitionResult', + full_name='google.cloud.speech.v1.SpeechRecognitionResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='alternatives', full_name='google.cloud.speech.v1.SpeechRecognitionResult.alternatives', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2148, + serialized_end=2249, +) + + +_SPEECHRECOGNITIONALTERNATIVE = _descriptor.Descriptor( + name='SpeechRecognitionAlternative', + full_name='google.cloud.speech.v1.SpeechRecognitionAlternative', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='transcript', full_name='google.cloud.speech.v1.SpeechRecognitionAlternative.transcript', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='confidence', full_name='google.cloud.speech.v1.SpeechRecognitionAlternative.confidence', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2251, + serialized_end=2321, +) + +_RECOGNIZEREQUEST.fields_by_name['config'].message_type = _RECOGNITIONCONFIG +_RECOGNIZEREQUEST.fields_by_name['audio'].message_type = _RECOGNITIONAUDIO +_LONGRUNNINGRECOGNIZEREQUEST.fields_by_name['config'].message_type = _RECOGNITIONCONFIG +_LONGRUNNINGRECOGNIZEREQUEST.fields_by_name['audio'].message_type = _RECOGNITIONAUDIO +_STREAMINGRECOGNIZEREQUEST.fields_by_name['streaming_config'].message_type = _STREAMINGRECOGNITIONCONFIG +_STREAMINGRECOGNIZEREQUEST.oneofs_by_name['streaming_request'].fields.append( + _STREAMINGRECOGNIZEREQUEST.fields_by_name['streaming_config']) +_STREAMINGRECOGNIZEREQUEST.fields_by_name['streaming_config'].containing_oneof = _STREAMINGRECOGNIZEREQUEST.oneofs_by_name['streaming_request'] +_STREAMINGRECOGNIZEREQUEST.oneofs_by_name['streaming_request'].fields.append( + _STREAMINGRECOGNIZEREQUEST.fields_by_name['audio_content']) +_STREAMINGRECOGNIZEREQUEST.fields_by_name['audio_content'].containing_oneof = _STREAMINGRECOGNIZEREQUEST.oneofs_by_name['streaming_request'] +_STREAMINGRECOGNITIONCONFIG.fields_by_name['config'].message_type = _RECOGNITIONCONFIG +_RECOGNITIONCONFIG.fields_by_name['encoding'].enum_type = _RECOGNITIONCONFIG_AUDIOENCODING +_RECOGNITIONCONFIG.fields_by_name['speech_contexts'].message_type = _SPEECHCONTEXT +_RECOGNITIONCONFIG_AUDIOENCODING.containing_type = _RECOGNITIONCONFIG +_RECOGNITIONAUDIO.oneofs_by_name['audio_source'].fields.append( + _RECOGNITIONAUDIO.fields_by_name['content']) +_RECOGNITIONAUDIO.fields_by_name['content'].containing_oneof = _RECOGNITIONAUDIO.oneofs_by_name['audio_source'] +_RECOGNITIONAUDIO.oneofs_by_name['audio_source'].fields.append( + _RECOGNITIONAUDIO.fields_by_name['uri']) +_RECOGNITIONAUDIO.fields_by_name['uri'].containing_oneof = _RECOGNITIONAUDIO.oneofs_by_name['audio_source'] +_RECOGNIZERESPONSE.fields_by_name['results'].message_type = _SPEECHRECOGNITIONRESULT +_LONGRUNNINGRECOGNIZERESPONSE.fields_by_name['results'].message_type = _SPEECHRECOGNITIONRESULT +_LONGRUNNINGRECOGNIZEMETADATA.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LONGRUNNINGRECOGNIZEMETADATA.fields_by_name['last_update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_STREAMINGRECOGNIZERESPONSE.fields_by_name['error'].message_type = google_dot_rpc_dot_status__pb2._STATUS +_STREAMINGRECOGNIZERESPONSE.fields_by_name['results'].message_type = _STREAMINGRECOGNITIONRESULT +_STREAMINGRECOGNIZERESPONSE.fields_by_name['speech_event_type'].enum_type = _STREAMINGRECOGNIZERESPONSE_SPEECHEVENTTYPE +_STREAMINGRECOGNIZERESPONSE_SPEECHEVENTTYPE.containing_type = _STREAMINGRECOGNIZERESPONSE +_STREAMINGRECOGNITIONRESULT.fields_by_name['alternatives'].message_type = _SPEECHRECOGNITIONALTERNATIVE +_SPEECHRECOGNITIONRESULT.fields_by_name['alternatives'].message_type = _SPEECHRECOGNITIONALTERNATIVE +DESCRIPTOR.message_types_by_name['RecognizeRequest'] = _RECOGNIZEREQUEST +DESCRIPTOR.message_types_by_name['LongRunningRecognizeRequest'] = _LONGRUNNINGRECOGNIZEREQUEST +DESCRIPTOR.message_types_by_name['StreamingRecognizeRequest'] = _STREAMINGRECOGNIZEREQUEST +DESCRIPTOR.message_types_by_name['StreamingRecognitionConfig'] = _STREAMINGRECOGNITIONCONFIG +DESCRIPTOR.message_types_by_name['RecognitionConfig'] = _RECOGNITIONCONFIG +DESCRIPTOR.message_types_by_name['SpeechContext'] = _SPEECHCONTEXT 
+DESCRIPTOR.message_types_by_name['RecognitionAudio'] = _RECOGNITIONAUDIO +DESCRIPTOR.message_types_by_name['RecognizeResponse'] = _RECOGNIZERESPONSE +DESCRIPTOR.message_types_by_name['LongRunningRecognizeResponse'] = _LONGRUNNINGRECOGNIZERESPONSE +DESCRIPTOR.message_types_by_name['LongRunningRecognizeMetadata'] = _LONGRUNNINGRECOGNIZEMETADATA +DESCRIPTOR.message_types_by_name['StreamingRecognizeResponse'] = _STREAMINGRECOGNIZERESPONSE +DESCRIPTOR.message_types_by_name['StreamingRecognitionResult'] = _STREAMINGRECOGNITIONRESULT +DESCRIPTOR.message_types_by_name['SpeechRecognitionResult'] = _SPEECHRECOGNITIONRESULT +DESCRIPTOR.message_types_by_name['SpeechRecognitionAlternative'] = _SPEECHRECOGNITIONALTERNATIVE + +RecognizeRequest = _reflection.GeneratedProtocolMessageType('RecognizeRequest', (_message.Message,), dict( + DESCRIPTOR = _RECOGNIZEREQUEST, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The top-level message sent by the client for the ``Recognize`` method. + + + Attributes: + config: + *Required* Provides information to the recognizer that + specifies how to process the request. + audio: + *Required* The audio data to be recognized. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognizeRequest) + )) +_sym_db.RegisterMessage(RecognizeRequest) + +LongRunningRecognizeRequest = _reflection.GeneratedProtocolMessageType('LongRunningRecognizeRequest', (_message.Message,), dict( + DESCRIPTOR = _LONGRUNNINGRECOGNIZEREQUEST, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The top-level message sent by the client for the + ``LongRunningRecognize`` method. + + + Attributes: + config: + *Required* Provides information to the recognizer that + specifies how to process the request. + audio: + *Required* The audio data to be recognized. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.LongRunningRecognizeRequest) + )) +_sym_db.RegisterMessage(LongRunningRecognizeRequest) + +StreamingRecognizeRequest = _reflection.GeneratedProtocolMessageType('StreamingRecognizeRequest', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNIZEREQUEST, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The top-level message sent by the client for the ``StreamingRecognize`` + method. Multiple ``StreamingRecognizeRequest`` messages are sent. The + first message must contain a ``streaming_config`` message and must not + contain ``audio`` data. All subsequent messages must contain ``audio`` + data and must not contain a ``streaming_config`` message. + + + Attributes: + streaming_config: + Provides information to the recognizer that specifies how to + process the request. The first ``StreamingRecognizeRequest`` + message must contain a ``streaming_config`` message. + audio_content: + The audio data to be recognized. Sequential chunks of audio + data are sent in sequential ``StreamingRecognizeRequest`` + messages. The first ``StreamingRecognizeRequest`` message must + not contain ``audio_content`` data and all subsequent + ``StreamingRecognizeRequest`` messages must contain + ``audio_content`` data. The audio bytes must be encoded as + specified in ``RecognitionConfig``. Note: as with all bytes + fields, protobuffers use a pure binary representation (not + base64). See `audio limits + <https://cloud.google.com/speech/limits#content>`__. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeRequest) + )) +_sym_db.RegisterMessage(StreamingRecognizeRequest) + +StreamingRecognitionConfig = _reflection.GeneratedProtocolMessageType('StreamingRecognitionConfig', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNITIONCONFIG, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Provides information to the recognizer that specifies how to process the + request. + + + Attributes: + config: + *Required* Provides information to the recognizer that + specifies how to process the request. + single_utterance: + *Optional* If ``false`` or omitted, the recognizer will + perform continuous recognition (continuing to wait for and + process audio even if the user pauses speaking) until the + client closes the input stream (gRPC API) or until the maximum + time limit has been reached. May return multiple + ``StreamingRecognitionResult``\ s with the ``is_final`` flag + set to ``true``. If ``true``, the recognizer will detect a + single spoken utterance. When it detects that the user has + paused or stopped speaking, it will return an + ``END_OF_SINGLE_UTTERANCE`` event and cease recognition. It + will return no more than one ``StreamingRecognitionResult`` + with the ``is_final`` flag set to ``true``. + interim_results: + *Optional* If ``true``, interim results (tentative hypotheses) + may be returned as they become available (these interim + results are indicated with the ``is_final=false`` flag). If + ``false`` or omitted, only ``is_final=true`` result(s) are + returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognitionConfig) + )) +_sym_db.RegisterMessage(StreamingRecognitionConfig) + +RecognitionConfig = _reflection.GeneratedProtocolMessageType('RecognitionConfig', (_message.Message,), dict( + DESCRIPTOR = _RECOGNITIONCONFIG, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Provides information to the recognizer that specifies how to process the + request. + + + Attributes: + encoding: + *Required* Encoding of audio data sent in all + ``RecognitionAudio`` messages. + sample_rate_hertz: + *Required* Sample rate in Hertz of the audio data sent in all + ``RecognitionAudio`` messages. Valid values are: 8000-48000. + 16000 is optimal. For best results, set the sampling rate of + the audio source to 16000 Hz. If that's not possible, use the + native sample rate of the audio source (instead of re- + sampling). + language_code: + *Required* The language of the supplied audio as a `BCP-47 + <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__ language + tag. Example: "en-US". See `Language Support + <https://cloud.google.com/speech/docs/languages>`__ for a list + of the currently supported language codes. + max_alternatives: + *Optional* Maximum number of recognition hypotheses to be + returned. Specifically, the maximum number of + ``SpeechRecognitionAlternative`` messages within each + ``SpeechRecognitionResult``. The server may return fewer than + ``max_alternatives``. Valid values are ``0``-``30``. A value + of ``0`` or ``1`` will return a maximum of one. If omitted, + will return a maximum of one. + profanity_filter: + *Optional* If set to ``true``, the server will attempt to + filter out profanities, replacing all but the initial + character in each filtered word with asterisks, e.g. + "f\*\*\*". If set to ``false`` or omitted, profanities won't + be filtered out. 
+ speech_contexts: + *Optional* A means to provide context to assist the speech + recognition. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionConfig) + )) +_sym_db.RegisterMessage(RecognitionConfig) + +SpeechContext = _reflection.GeneratedProtocolMessageType('SpeechContext', (_message.Message,), dict( + DESCRIPTOR = _SPEECHCONTEXT, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Provides "hints" to the speech recognizer to favor specific words and + phrases in the results. + + + Attributes: + phrases: + *Optional* A list of strings containing words and phrases + "hints" so that the speech recognition is more likely to + recognize them. This can be used to improve the accuracy for + specific words and phrases, for example, if specific commands + are typically spoken by the user. This can also be used to add + additional words to the vocabulary of the recognizer. See + `usage limits + <https://cloud.google.com/speech/limits#content>`__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechContext) + )) +_sym_db.RegisterMessage(SpeechContext) + +RecognitionAudio = _reflection.GeneratedProtocolMessageType('RecognitionAudio', (_message.Message,), dict( + DESCRIPTOR = _RECOGNITIONAUDIO, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Contains audio data in the encoding specified in the + ``RecognitionConfig``. Either ``content`` or ``uri`` must be supplied. + Supplying both or neither returns [google.rpc.Code.INVALID\_ARGUMENT][]. + See `audio limits <https://cloud.google.com/speech/limits#content>`__. + + + Attributes: + content: + The audio data bytes encoded as specified in + ``RecognitionConfig``. Note: as with all bytes fields, + protobuffers use a pure binary representation, whereas JSON + representations use base64. + uri: + URI that points to a file that contains audio data bytes as + specified in ``RecognitionConfig``. Currently, only Google + Cloud Storage URIs are supported, which must be specified in + the following format: ``gs://bucket_name/object_name`` (other + URI formats return [google.rpc.Code.INVALID\_ARGUMENT][]). For + more information, see `Request URIs + <https://cloud.google.com/storage/docs/reference-uris>`__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionAudio) + )) +_sym_db.RegisterMessage(RecognitionAudio) + +RecognizeResponse = _reflection.GeneratedProtocolMessageType('RecognizeResponse', (_message.Message,), dict( + DESCRIPTOR = _RECOGNIZERESPONSE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The only message returned to the client by the ``Recognize`` method. It + contains the result as zero or more sequential + ``SpeechRecognitionResult`` messages. + + + Attributes: + results: + *Output-only* Sequential list of transcription results + corresponding to sequential portions of audio. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognizeResponse) + )) +_sym_db.RegisterMessage(RecognizeResponse) + +LongRunningRecognizeResponse = _reflection.GeneratedProtocolMessageType('LongRunningRecognizeResponse', (_message.Message,), dict( + DESCRIPTOR = _LONGRUNNINGRECOGNIZERESPONSE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The only message returned to the client by the ``LongRunningRecognize`` + method. It contains the result as zero or more sequential + ``SpeechRecognitionResult`` messages. 
It is included in the + ``result.response`` field of the ``Operation`` returned by the + ``GetOperation`` call of the ``google::longrunning::Operations`` + service. + + + Attributes: + results: + *Output-only* Sequential list of transcription results + corresponding to sequential portions of audio. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.LongRunningRecognizeResponse) + )) +_sym_db.RegisterMessage(LongRunningRecognizeResponse) + +LongRunningRecognizeMetadata = _reflection.GeneratedProtocolMessageType('LongRunningRecognizeMetadata', (_message.Message,), dict( + DESCRIPTOR = _LONGRUNNINGRECOGNIZEMETADATA, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Describes the progress of a long-running ``LongRunningRecognize`` call. + It is included in the ``metadata`` field of the ``Operation`` returned + by the ``GetOperation`` call of the ``google::longrunning::Operations`` + service. + + + Attributes: + progress_percent: + Approximate percentage of audio processed thus far. Guaranteed + to be 100 when the audio is fully processed and the results + are available. + start_time: + Time when the request was received. + last_update_time: + Time of the most recent processing update. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.LongRunningRecognizeMetadata) + )) +_sym_db.RegisterMessage(LongRunningRecognizeMetadata) + +StreamingRecognizeResponse = _reflection.GeneratedProtocolMessageType('StreamingRecognizeResponse', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNIZERESPONSE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """``StreamingRecognizeResponse`` is the only message returned to the + client by ``StreamingRecognize``. A series of one or more + ``StreamingRecognizeResponse`` messages are streamed back to the client. + + Here's an example of a series of ten ``StreamingRecognizeResponse``\ s + that might be returned while processing audio: + + 1. results { alternatives { transcript: "tube" } stability: 0.01 } + + 2. results { alternatives { transcript: "to be a" } stability: 0.01 } + + 3. results { alternatives { transcript: "to be" } stability: 0.9 } + results { alternatives { transcript: " or not to be" } stability: + 0.01 } + + 4. results { alternatives { transcript: "to be or not to be" confidence: + 0.92 } alternatives { transcript: "to bee or not to bee" } is\_final: + true } + + 5. results { alternatives { transcript: " that's" } stability: 0.01 } + + 6. results { alternatives { transcript: " that is" } stability: 0.9 } + results { alternatives { transcript: " the question" } stability: + 0.01 } + + 7. speech\_event\_type: END\_OF\_SINGLE\_UTTERANCE + + 8. results { alternatives { transcript: " that is the question" + confidence: 0.98 } alternatives { transcript: " that was the + question" } is\_final: true } + + Notes: + + - Only two of the above responses #4 and #8 contain final results; they + are indicated by ``is_final: true``. Concatenating these together + generates the full transcript: "to be or not to be that is the + question". + + - The others contain interim ``results``. #3 and #6 contain two interim + ``results``: the first portion has a high stability and is less + likely to change; the second portion has a low stability and is very + likely to change. A UI designer might choose to show only high + stability ``results``. + + - The specific ``stability`` and ``confidence`` values shown above are + only for illustrative purposes. 
Actual values may vary. + + - In each response, only one of these fields will be set: ``error``, + ``speech_event_type``, or one or more (repeated) ``results``. + + + + + Attributes: + error: + *Output-only* If set, returns a [google.rpc.Status][] message + that specifies the error for the operation. + results: + *Output-only* This repeated list contains zero or more results + that correspond to consecutive portions of the audio currently + being processed. It contains zero or one ``is_final=true`` + result (the newly settled portion), followed by zero or more + ``is_final=false`` results. + speech_event_type: + *Output-only* Indicates the type of speech event. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeResponse) + )) +_sym_db.RegisterMessage(StreamingRecognizeResponse) + +StreamingRecognitionResult = _reflection.GeneratedProtocolMessageType('StreamingRecognitionResult', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNITIONRESULT, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """A streaming speech recognition result corresponding to a portion of the + audio that is currently being processed. + + + Attributes: + alternatives: + *Output-only* May contain one or more recognition hypotheses + (up to the maximum specified in ``max_alternatives``). + is_final: + *Output-only* If ``false``, this + ``StreamingRecognitionResult`` represents an interim result + that may change. If ``true``, this is the final time the + speech service will return this particular + ``StreamingRecognitionResult``, the recognizer will not return + any further hypotheses for this portion of the transcript and + corresponding audio. + stability: + *Output-only* An estimate of the likelihood that the + recognizer will not change its guess about this interim + result. Values range from 0.0 (completely unstable) to 1.0 + (completely stable). This field is only provided for interim + results (``is_final=false``). The default of 0.0 is a sentinel + value indicating ``stability`` was not set. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognitionResult) + )) +_sym_db.RegisterMessage(StreamingRecognitionResult) + +SpeechRecognitionResult = _reflection.GeneratedProtocolMessageType('SpeechRecognitionResult', (_message.Message,), dict( + DESCRIPTOR = _SPEECHRECOGNITIONRESULT, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """A speech recognition result corresponding to a portion of the audio. + + + Attributes: + alternatives: + *Output-only* May contain one or more recognition hypotheses + (up to the maximum specified in ``max_alternatives``). + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechRecognitionResult) + )) +_sym_db.RegisterMessage(SpeechRecognitionResult) + +SpeechRecognitionAlternative = _reflection.GeneratedProtocolMessageType('SpeechRecognitionAlternative', (_message.Message,), dict( + DESCRIPTOR = _SPEECHRECOGNITIONALTERNATIVE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Alternative hypotheses (a.k.a. n-best list). + + + Attributes: + transcript: + *Output-only* Transcript text representing the words that the + user spoke. + confidence: + *Output-only* The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is typically + provided only for the top hypothesis, and only for + ``is_final=true`` results. 
Clients should not rely on the + ``confidence`` field as it is not guaranteed to be accurate, + or even set, in any of the results. The default of 0.0 is a + sentinel value indicating ``confidence`` was not set. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechRecognitionAlternative) + )) +_sym_db.RegisterMessage(SpeechRecognitionAlternative) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032com.google.cloud.speech.v1B\013SpeechProtoP\001Z<google.golang.org/genproto/googleapis/cloud/speech/v1;speech')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class SpeechStub(object): + """Service that implements Google Cloud Speech API. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Recognize = channel.unary_unary( + '/google.cloud.speech.v1.Speech/Recognize', + request_serializer=RecognizeRequest.SerializeToString, + response_deserializer=RecognizeResponse.FromString, + ) + self.LongRunningRecognize = channel.unary_unary( + '/google.cloud.speech.v1.Speech/LongRunningRecognize', + request_serializer=LongRunningRecognizeRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.StreamingRecognize = channel.stream_stream( + '/google.cloud.speech.v1.Speech/StreamingRecognize', + request_serializer=StreamingRecognizeRequest.SerializeToString, + response_deserializer=StreamingRecognizeResponse.FromString, + ) + + + class SpeechServicer(object): + """Service that implements Google Cloud Speech API. + """ + + def Recognize(self, request, context): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LongRunningRecognize(self, request, context): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingRecognize(self, request_iterator, context): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_SpeechServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Recognize': grpc.unary_unary_rpc_method_handler( + servicer.Recognize, + request_deserializer=RecognizeRequest.FromString, + response_serializer=RecognizeResponse.SerializeToString, + ), + 'LongRunningRecognize': grpc.unary_unary_rpc_method_handler( + servicer.LongRunningRecognize, + request_deserializer=LongRunningRecognizeRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'StreamingRecognize': grpc.stream_stream_rpc_method_handler( + servicer.StreamingRecognize, + request_deserializer=StreamingRecognizeRequest.FromString, + response_serializer=StreamingRecognizeResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.speech.v1.Speech', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaSpeechServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service that implements Google Cloud Speech API. + """ + def Recognize(self, request, context): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def LongRunningRecognize(self, request, context): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def StreamingRecognize(self, request_iterator, context): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaSpeechStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service that implements Google Cloud Speech API. + """ + def Recognize(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + """ + raise NotImplementedError() + Recognize.future = None + def LongRunningRecognize(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. 
+ """ + raise NotImplementedError() + LongRunningRecognize.future = None + def StreamingRecognize(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + """ + raise NotImplementedError() + + + def beta_create_Speech_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): LongRunningRecognizeRequest.FromString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeRequest.FromString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeRequest.FromString, + } + response_serializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeResponse.SerializeToString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeResponse.SerializeToString, + } + method_implementations = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): face_utilities.unary_unary_inline(servicer.LongRunningRecognize), + ('google.cloud.speech.v1.Speech', 'Recognize'): face_utilities.unary_unary_inline(servicer.Recognize), + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): face_utilities.stream_stream_inline(servicer.StreamingRecognize), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Speech_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): LongRunningRecognizeRequest.SerializeToString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeRequest.SerializeToString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeRequest.SerializeToString, + } + response_deserializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeResponse.FromString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeResponse.FromString, + } + cardinalities = { + 'LongRunningRecognize': cardinality.Cardinality.UNARY_UNARY, + 'Recognize': cardinality.Cardinality.UNARY_UNARY, + 'StreamingRecognize': cardinality.Cardinality.STREAM_STREAM, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.cloud.speech.v1.Speech', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py new file mode 100644 index 000000000000..730f8443a3bd --- /dev/null +++ b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py @@ -0,0 +1,86 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.speech.v1.cloud_speech_pb2 as google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2 +import google.longrunning.operations_pb2 as google_dot_longrunning_dot_operations__pb2 + + +class SpeechStub(object): + """Service that implements Google Cloud Speech API. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Recognize = channel.unary_unary( + '/google.cloud.speech.v1.Speech/Recognize', + request_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeResponse.FromString, + ) + self.LongRunningRecognize = channel.unary_unary( + '/google.cloud.speech.v1.Speech/LongRunningRecognize', + request_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.LongRunningRecognizeRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.StreamingRecognize = channel.stream_stream( + '/google.cloud.speech.v1.Speech/StreamingRecognize', + request_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeResponse.FromString, + ) + + +class SpeechServicer(object): + """Service that implements Google Cloud Speech API. + """ + + def Recognize(self, request, context): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LongRunningRecognize(self, request, context): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingRecognize(self, request_iterator, context): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SpeechServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Recognize': grpc.unary_unary_rpc_method_handler( + servicer.Recognize, + request_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeResponse.SerializeToString, + ), + 'LongRunningRecognize': grpc.unary_unary_rpc_method_handler( + servicer.LongRunningRecognize, + request_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.LongRunningRecognizeRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'StreamingRecognize': grpc.stream_stream_rpc_method_handler( + servicer.StreamingRecognize, + request_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.speech.v1.Speech', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/speech/google/cloud/speech/__init__.py b/speech/google/cloud/speech/__init__.py index 9c1654a2a6c7..1035b45c1d0d 100644 --- a/speech/google/cloud/speech/__init__.py +++ b/speech/google/cloud/speech/__init__.py @@ -23,5 +23,23 @@ from google.cloud.speech.encoding import Encoding from google.cloud.speech.operation import Operation +from google.cloud.speech_v1 import enums +from google.cloud.speech_v1 import SpeechClient +from google.cloud.speech_v1 import types -__all__ = ['__version__', 'Alternative', 'Client', 'Encoding', 'Operation'] + +__all__ = ( + # Common + '__version__', + + # Deprecated Manual Layer + 'Alternative', + 'Client', + 'Encoding', + 'Operation', + + # GAPIC & Partial Manual Layer + 'enums', + 'SpeechClient', + 'types', +) diff --git a/speech/google/cloud/speech/_gax.py b/speech/google/cloud/speech/_gax.py index c03c08540214..48d063bfaa8e 100644 --- a/speech/google/cloud/speech/_gax.py +++ b/speech/google/cloud/speech/_gax.py @@ -26,8 +26,7 @@ StreamingRecognizeRequest) from google.longrunning import operations_grpc -from google.cloud._helpers import make_secure_channel -from google.cloud._helpers import make_secure_stub +from google.cloud import _helpers from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.speech import 
__version__ @@ -46,7 +45,7 @@ class GAPICSpeechAPI(object): def __init__(self, client=None): self._client = client credentials = self._client._credentials - channel = make_secure_channel( + channel = _helpers.make_secure_channel( credentials, DEFAULT_USER_AGENT, SpeechClient.SERVICE_ADDRESS) self._gapic_api = SpeechClient( @@ -54,7 +53,7 @@ def __init__(self, client=None): lib_name='gccl', lib_version=__version__, ) - self._operations_stub = make_secure_stub( + self._operations_stub = _helpers.make_secure_stub( credentials, DEFAULT_USER_AGENT, operations_grpc.OperationsStub, diff --git a/speech/google/cloud/speech/_http.py b/speech/google/cloud/speech/_http.py index 61990a5a9ff6..1e0cc3b298ed 100644 --- a/speech/google/cloud/speech/_http.py +++ b/speech/google/cloud/speech/_http.py @@ -224,7 +224,7 @@ def _build_request_data(sample, language_code, max_alternatives=None, if profanity_filter is not None: config['profanityFilter'] = profanity_filter if speech_contexts: - config['speechContext'] = {'phrases': speech_contexts} + config['speechContexts'] = {'phrases': speech_contexts} data = { 'audio': audio, diff --git a/speech/google/cloud/speech/client.py b/speech/google/cloud/speech/client.py index f9eb211c4a80..7c066d48cb9d 100644 --- a/speech/google/cloud/speech/client.py +++ b/speech/google/cloud/speech/client.py @@ -14,7 +14,10 @@ """Basic client for Google Cloud Speech API.""" +from __future__ import absolute_import + import os +import warnings from google.cloud.client import Client as BaseClient from google.cloud.environment_vars import DISABLE_GRPC @@ -60,6 +63,14 @@ class Client(BaseClient): _speech_api = None def __init__(self, credentials=None, _http=None, _use_grpc=None): + warnings.warn( + 'This client class and objects that derive from it have been ' + 'deprecated. Use `google.cloud.speech.SpeechClient` ' + '(provided by this package) instead. This client will be removed ' + 'in a future release.', + DeprecationWarning, + ) + super(Client, self).__init__(credentials=credentials, _http=_http) # Save on the actual client class whether we use GAX or not. if _use_grpc is None: diff --git a/speech/google/cloud/speech_v1/__init__.py b/speech/google/cloud/speech_v1/__init__.py new file mode 100644 index 000000000000..be9c3772b4a6 --- /dev/null +++ b/speech/google/cloud/speech_v1/__init__.py @@ -0,0 +1,34 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +from google.cloud.gapic.speech.v1 import speech_client +from google.cloud.gapic.speech.v1 import enums + +from google.cloud.speech_v1.helpers import SpeechHelpers +from google.cloud.speech_v1 import types + + +class SpeechClient(SpeechHelpers, speech_client.SpeechClient): + __doc__ = speech_client.SpeechClient.__doc__ + enums = enums + types = types + + +__all__ = ( + 'enums', + 'SpeechClient', + 'types', +) diff --git a/speech/google/cloud/speech_v1/helpers.py b/speech/google/cloud/speech_v1/helpers.py new file mode 100644 index 000000000000..8ecddc2738f1 --- /dev/null +++ b/speech/google/cloud/speech_v1/helpers.py @@ -0,0 +1,88 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + + +class SpeechHelpers(object): + """A set of convenience methods to make the Speech client easier to use. + + This class should be considered abstract; it is used as a superclass + in a multiple-inheritance construction alongside the applicable GAPIC. + See the :class:`~google.cloud.speech_v1.SpeechClient`. + """ + def streaming_recognize(self, config, requests, options=None): + """Perform bi-directional speech recognition. + + This method allows you to receive results while sending audio; + it is only available via gRPC (not REST). + + .. warning:: + + This method is EXPERIMENTAL. Its interface might change in the + future. + + Example: + >>> from google.cloud.speech_v1 import enums + >>> from google.cloud.speech_v1 import SpeechClient + >>> from google.cloud.speech_v1 import types + >>> client = SpeechClient() + >>> config = types.StreamingRecognitionConfig( + ... config=types.RecognitionConfig( + ... encoding=enums.RecognitionConfig.AudioEncoding.FLAC, + ... ), + ... ) + >>> request = types.StreamingRecognizeRequest(audio_content=b'...') + >>> requests = [request] + >>> for element in client.streaming_recognize(config, requests): + ... # process element + ... pass + + Args: + config (:class:`~.types.StreamingRecognitionConfig`): The + configuration to use for the stream. + requests (Iterable[:class:`~.types.StreamingRecognizeRequest`]): + The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g., timeout, retries, etc. + + Returns: + Iterable[:class:`~.types.StreamingRecognizeResponse`] + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + return self._streaming_recognize( + self._streaming_request_iterable(config, requests), + options, + ) + + def _streaming_request_iterable(self, config, requests): + """A generator that yields the config followed by the requests. + + Args: + config (~.speech_v1.types.StreamingRecognitionConfig): The + configuration to use for the stream. + requests (Iterable[~.speech_v1.types.StreamingRecognizeRequest]): + The input objects.
+ + Returns: + Iterable[~.speech_v1.types.StreamingRecognizeRequest]: The + correctly formatted input for + :meth:`~.speech_v1.SpeechClient.streaming_recognize`. + """ + yield self.types.StreamingRecognizeRequest(streaming_config=config) + for request in requests: + yield request diff --git a/speech/google/cloud/speech_v1/types.py b/speech/google/cloud/speech_v1/types.py new file mode 100644 index 000000000000..75ec9a5d2b59 --- /dev/null +++ b/speech/google/cloud/speech_v1/types.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.cloud.proto.speech.v1 import cloud_speech_pb2 + +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(cloud_speech_pb2).items(): + message.__module__ = 'google.cloud.speech_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/speech/nox.py b/speech/nox.py index 272a60231491..ee174668d021 100644 --- a/speech/nox.py +++ b/speech/nox.py @@ -30,15 +30,24 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.speech', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', '--quiet', + '--cov=google.cloud.speech', + '--cov=google.cloud.speech_v1', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=0', + os.path.join('tests', 'unit'), + ) @@ -49,11 +58,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -66,21 +78,32 @@ @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues.
""" session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/speech') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/speech/pylint.config.py b/speech/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/speech/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/speech/setup.py b/speech/setup.py index dda61babdf6a..1075df837141 100644 --- a/speech/setup.py +++ b/speech/setup.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io import os from setuptools import find_packages @@ -20,6 +21,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: README = file_obj.read() @@ -27,7 +29,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -35,7 +37,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -50,19 +52,23 @@ } REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', - 'grpcio >= 1.0.2, < 2.0dev', - 'gapic-google-cloud-speech-v1 >= 0.15.3, < 0.16dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', ] setup( name='google-cloud-speech', - version='0.25.1', + version='0.27.0', description='Python Client for Google Cloud Speech', long_description=README, namespace_packages=[ 'google', 'google.cloud', + 'google.cloud.gapic', + 'google.cloud.gapic.speech', + 'google.cloud.proto', + 'google.cloud.proto.speech', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, diff --git a/speech/tests/gapic/test_speech_client_v1.py b/speech/tests/gapic/test_speech_client_v1.py new file mode 100644 index 000000000000..acd196adde68 --- /dev/null +++ b/speech/tests/gapic/test_speech_client_v1.py @@ -0,0 +1,212 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
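Listing `google.cloud.gapic` and `google.cloud.proto` under `namespace_packages` in the setup above lets the generated GAPIC and proto distributions share those directories with this package. Each level of such a namespace conventionally ships only a declaration in its `__init__.py`; a sketch of that file, offered as an assumption about generated files not shown in this diff:

    # google/cloud/gapic/__init__.py (and similarly at each namespace level)
    try:
        import pkg_resources
        pkg_resources.declare_namespace(__name__)
    except ImportError:
        import pkgutil
        __path__ = pkgutil.extend_path(__path__, __name__)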
+"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors +from google.rpc import status_pb2 + +from google.cloud.gapic.speech.v1 import enums +from google.cloud.gapic.speech.v1 import speech_client +from google.cloud.proto.speech.v1 import cloud_speech_pb2 +from google.longrunning import operations_pb2 + + +class CustomException(Exception): + pass + + +class TestSpeechClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_recognize(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock response + expected_response = cloud_speech_pb2.RecognizeResponse() + grpc_stub.Recognize.return_value = expected_response + + response = client.recognize(config, audio) + self.assertEqual(expected_response, response) + + grpc_stub.Recognize.assert_called_once() + args, kwargs = grpc_stub.Recognize.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = cloud_speech_pb2.RecognizeRequest( + config=config, audio=audio) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_recognize_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock exception response + grpc_stub.Recognize.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.recognize, config, audio) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_long_running_recognize(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock response + expected_response = cloud_speech_pb2.LongRunningRecognizeResponse() + operation = operations_pb2.Operation( + name='operations/test_long_running_recognize', done=True) + operation.response.Pack(expected_response) + grpc_stub.LongRunningRecognize.return_value = operation + + response = client.long_running_recognize(config, audio) + self.assertEqual(expected_response, response.result()) + + grpc_stub.LongRunningRecognize.assert_called_once() + args, kwargs = 
grpc_stub.LongRunningRecognize.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = cloud_speech_pb2.LongRunningRecognizeRequest( + config=config, audio=audio) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_long_running_recognize_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock exception response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name='operations/test_long_running_recognize_exception', done=True) + operation.error.CopyFrom(error) + grpc_stub.LongRunningRecognize.return_value = operation + + response = client.long_running_recognize(config, audio) + self.assertEqual(error, response.exception()) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_streaming_recognize(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + request = cloud_speech_pb2.StreamingRecognizeRequest() + requests = [request] + + # Mock response + expected_response = cloud_speech_pb2.StreamingRecognizeResponse() + grpc_stub.StreamingRecognize.return_value = iter([expected_response]) + + response = client.streaming_recognize(requests) + resources = list(response) + self.assertEqual(1, len(resources)) + self.assertEqual(expected_response, resources[0]) + + grpc_stub.StreamingRecognize.assert_called_once() + args, kwargs = grpc_stub.StreamingRecognize.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_requests = args[0] + self.assertEqual(1, len(actual_requests)) + actual_request = list(actual_requests)[0] + self.assertEqual(request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_streaming_recognize_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + request = cloud_speech_pb2.StreamingRecognizeRequest() + requests = [request] + + # Mock exception response + grpc_stub.StreamingRecognize.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.streaming_recognize, + requests) diff --git a/speech/tests/system.py b/speech/tests/system.py index 0c4acfb52767..35c1ee3d1521 100644 --- a/speech/tests/system.py +++ b/speech/tests/system.py @@ -16,6 +16,8 @@ import time import unittest +import six + from google.cloud import exceptions from google.cloud import speech from google.cloud import storage @@ -158,11 +160,11 @@ def test_sync_recognize_local_file(self): content = file_obj.read() results = self._make_sync_request(content=content, - max_alternatives=2) + max_alternatives=1) self.assertEqual(len(results), 1) alternatives = results[0].alternatives - 
self.assertEqual(len(alternatives), 2) - self._check_results(alternatives, 2) + self.assertEqual(len(alternatives), 1) + self._check_results(alternatives, 1) def test_sync_recognize_gcs_file(self): bucket_name = Config.TEST_BUCKET.name @@ -183,12 +185,12 @@ def test_async_recognize_local_file(self): content = file_obj.read() operation = self._make_async_request(content=content, - max_alternatives=2) + max_alternatives=1) _wait_until_complete(operation) self.assertEqual(len(operation.results), 1) alternatives = operation.results[0].alternatives - self.assertEqual(len(alternatives), 2) - self._check_results(alternatives, 2) + self.assertEqual(len(alternatives), 1) + self._check_results(alternatives, 1) def test_async_recognize_gcs_file(self): bucket_name = Config.TEST_BUCKET.name @@ -200,13 +202,13 @@ def test_async_recognize_gcs_file(self): source_uri = 'gs://%s/%s' % (bucket_name, blob_name) operation = self._make_async_request(source_uri=source_uri, - max_alternatives=2) + max_alternatives=1) _wait_until_complete(operation) self.assertEqual(len(operation.results), 1) alternatives = operation.results[0].alternatives - self.assertEqual(len(alternatives), 2) - self._check_results(alternatives, 2) + self.assertEqual(len(alternatives), 1) + self._check_results(alternatives, 1) def test_stream_recognize(self): if not Config.USE_GRPC: @@ -220,18 +222,17 @@ def test_stream_recognize_interim_results(self): if not Config.USE_GRPC: self.skipTest('gRPC is required for Speech Streaming Recognize.') - # These extra words are interim_results that the API returns as it's - # deciphering the speech audio. This has a high probability of becoming - # out of date and causing the test to fail. - extras = ' Google Now who hello thank you for you for use hello ' + # Just test that the interim results exist; the exact value can and + # does change, so writing a test for it is difficult.
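For reference, a sketch of the kind of streaming call this system test drives, using the deprecated manual layer's `sample` API exercised elsewhere in this suite; the audio path and sample rate are placeholders, and forwarding of `interim_results` by the `_make_streaming_request` helper is assumed:

    from google.cloud import speech

    client = speech.Client()  # deprecated manual layer under test here
    with open('hello.wav', 'rb') as file_obj:
        sample = client.sample(
            stream=file_obj,
            encoding=speech.Encoding.LINEAR16,
            sample_rate_hertz=16000,
        )
        # Interim hypotheses stream back while audio is still being sent;
        # only responses marked is_final are stable enough to assert on.
        for response in sample.streaming_recognize(
                language_code='en-US', interim_results=True):
            print(response.alternatives[0].transcript)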
with open(AUDIO_FILE, 'rb') as file_obj: recognize = self._make_streaming_request(file_obj, interim_results=True) responses = list(recognize) for response in responses: - if response.alternatives[0].transcript: - self.assertIn(response.alternatives[0].transcript, - extras + self.ASSERT_TEXT) + self.assertIsInstance( + response.alternatives[0].transcript, + six.text_type, + ) self.assertGreater(len(responses), 5) self._check_results(responses[-1].alternatives) diff --git a/speech/tests/unit/test__gax.py b/speech/tests/unit/test__gax.py index 7cf44ba58f6e..4587f3b6d6a5 100644 --- a/speech/tests/unit/test__gax.py +++ b/speech/tests/unit/test__gax.py @@ -34,18 +34,17 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) + @mock.patch('google.cloud._helpers.make_secure_channel', + return_value=mock.sentinel.channel) @mock.patch( - 'google.cloud._helpers.make_secure_channel', - return_value=mock.sentinel.channel) - @mock.patch( - 'google.cloud.gapic.speech.v1.speech_client.SpeechClient', - SERVICE_ADDRESS='hey.you.guys') - @mock.patch( - 'google.cloud._helpers.make_secure_stub', - return_value=mock.sentinel.stub) - def test_constructor(self, mocked_stub, mocked_cls, mocked_channel): + 'google.cloud.gapic.speech.v1.speech_client.SpeechClient.__init__', + return_value=None) + @mock.patch('google.cloud._helpers.make_secure_stub', + return_value=mock.sentinel.stub) + def test_constructor(self, mocked_stub, mocked_init, mocked_channel): from google.longrunning import operations_grpc from google.cloud._http import DEFAULT_USER_AGENT + from google.cloud.gapic.speech.v1.speech_client import SpeechClient from google.cloud.speech import __version__ from google.cloud.speech._gax import OPERATIONS_API_HOST @@ -57,17 +56,17 @@ def test_constructor(self, mocked_stub, mocked_cls, mocked_channel): speech_api = self._make_one(mock_client) self.assertIs(speech_api._client, mock_client) - self.assertIs(speech_api._gapic_api, mocked_cls.return_value) + self.assertIsInstance(speech_api._gapic_api, SpeechClient) mocked_stub.assert_called_once_with( mock_cnxn.credentials, DEFAULT_USER_AGENT, operations_grpc.OperationsStub, OPERATIONS_API_HOST) - mocked_cls.assert_called_once_with( + mocked_init.assert_called_once_with( channel=mock.sentinel.channel, lib_name='gccl', lib_version=__version__) mocked_channel.assert_called_once_with( mock_cnxn.credentials, DEFAULT_USER_AGENT, - mocked_cls.SERVICE_ADDRESS) + 'speech.googleapis.com') class TestSpeechGAXMakeRequests(unittest.TestCase): diff --git a/speech/tests/unit/test_client.py b/speech/tests/unit/test_client.py index f971bb4865d1..b66d3080e066 100644 --- a/speech/tests/unit/test_client.py +++ b/speech/tests/unit/test_client.py @@ -88,8 +88,8 @@ def test_ctor(self): creds = _make_credentials() http = object() client = self._make_one(credentials=creds, _http=http) - self.assertTrue(client._credentials is creds) - self.assertTrue(client._http is http) + self.assertIs(client._credentials, creds) + self.assertIs(client._http, http) def test_ctor_use_grpc_preset(self): creds = _make_credentials() @@ -135,7 +135,7 @@ def test_sync_recognize_content_with_optional_params_no_gax(self): 'encoding': 'FLAC', 'maxAlternatives': 2, 'sampleRateHertz': 16000, - 'speechContext': { + 'speechContexts': { 'phrases': [ 'hi', ] @@ -246,6 +246,7 @@ def test_sync_recognize_with_empty_results_no_gax(self): next(sample.recognize(language_code='en-US')) def test_sync_recognize_with_empty_results_gax(self): + from google.cloud import _helpers from 
google.cloud._testing import _Monkey from google.cloud import speech @@ -255,13 +256,6 @@ def test_sync_recognize_with_empty_results_gax(self): client = self._make_one(credentials=credentials, _use_grpc=True) client._credentials = credentials - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( response=_make_sync_response(), channel=channel, **kwargs) @@ -269,16 +263,19 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) - low_level = client.speech_api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - self.assertEqual( - channel_args, - [(credentials, _gax.DEFAULT_USER_AGENT, host)]) + low_level = client.speech_api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) + + assert msc.mock_calls[0] == mock.call( + credentials, + _gax.DEFAULT_USER_AGENT, + host, + ) sample = client.sample( source_uri=self.AUDIO_SOURCE_URI, encoding=speech.Encoding.FLAC, @@ -288,6 +285,7 @@ def speech_api(channel=None, **kwargs): next(sample.recognize(language_code='en-US')) def test_sync_recognize_with_gax(self): + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud import speech @@ -306,13 +304,6 @@ def test_sync_recognize_with_gax(self): }] result = _make_result(alternatives) - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( response=_make_sync_response(result), channel=channel, @@ -325,15 +316,19 @@ def speech_api(channel=None, **kwargs): source_uri=self.AUDIO_SOURCE_URI, encoding=speech.Encoding.FLAC, sample_rate_hertz=self.SAMPLE_RATE) - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) - low_level = client.speech_api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - self.assertEqual( - channel_args, [(creds, _gax.DEFAULT_USER_AGENT, host)]) + low_level = client.speech_api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) + + assert msc.mock_calls[0] == mock.call( + creds, + _gax.DEFAULT_USER_AGENT, + host, + ) results = [i for i in sample.recognize(language_code='en-US')] @@ -351,18 +346,6 @@ def speech_api(channel=None, **kwargs): self.assertEqual( result.alternatives[1].confidence, alternatives[1]['confidence']) - def test_async_supported_encodings(self): - from google.cloud import speech - - credentials = _make_credentials() - client = self._make_one(credentials=credentials, _use_grpc=True) - - sample = client.sample( - source_uri=self.AUDIO_SOURCE_URI, encoding=speech.Encoding.FLAC, - sample_rate_hertz=self.SAMPLE_RATE) - with 
self.assertRaises(ValueError): - sample.recognize(language_code='en-US') - def test_async_recognize_no_gax(self): from google.cloud import speech from google.cloud.speech.operation import Operation @@ -392,6 +375,7 @@ def test_async_recognize_no_gax(self): def test_async_recognize_with_gax(self): from google.cloud._testing import _Monkey + from google.cloud import _helpers from google.cloud import speech from google.cloud.speech import _gax from google.cloud.speech.operation import Operation @@ -400,13 +384,6 @@ def test_async_recognize_with_gax(self): client = self._make_one(credentials=credentials, _use_grpc=True) client._credentials = credentials - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - sample = client.sample( encoding=speech.Encoding.LINEAR16, sample_rate_hertz=self.SAMPLE_RATE, @@ -415,20 +392,21 @@ def make_channel(*args): def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI(channel=channel, **kwargs) + speech_api.SERVICE_ADDRESS = 'foo.api.invalid' - host = 'foo.apis.invalid' - speech_api.SERVICE_ADDRESS = host + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + api = client.speech_api - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - api = client.speech_api + low_level = api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) - low_level = api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - expected = (credentials, _gax.DEFAULT_USER_AGENT, - low_level.SERVICE_ADDRESS) - self.assertEqual(channel_args, [expected]) + assert msc.mock_calls[0] == mock.call( + credentials, + _gax.DEFAULT_USER_AGENT, + 'foo.api.invalid', + ) operation = sample.long_running_recognize(language_code='en-US') self.assertIsInstance(operation, Operation) @@ -450,6 +428,7 @@ def test_streaming_depends_on_gax(self): def test_streaming_closed_stream(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -460,13 +439,6 @@ def test_streaming_closed_stream(self): client = self._make_one(credentials=credentials) client._credentials = credentials - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI(channel=channel, **kwargs) @@ -480,9 +452,9 @@ def speech_api(channel=None, **kwargs): stream=stream, encoding=Encoding.LINEAR16, sample_rate_hertz=self.SAMPLE_RATE) - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) with self.assertRaises(ValueError): list(sample.streaming_recognize(language_code='en-US')) @@ -490,6 +462,7 @@ def speech_api(channel=None, **kwargs): def test_stream_recognize_interim_results(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -518,13 +491,6 @@ def test_stream_recognize_interim_results(self): alternatives, is_final=True, stability=0.4375)) responses = [first_response, second_response, last_response] - 
channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( channel=channel, response=responses, **kwargs) @@ -532,9 +498,9 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) sample = client.sample( stream=stream, encoding=Encoding.LINEAR16, @@ -582,6 +548,7 @@ def speech_api(channel=None, **kwargs): def test_stream_recognize(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -609,10 +576,6 @@ def test_stream_recognize(self): channel_args = [] channel_obj = object() - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( channel=channel, response=responses, **kwargs) @@ -620,9 +583,9 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) sample = client.sample( stream=stream, encoding=Encoding.LINEAR16, @@ -639,6 +602,7 @@ def speech_api(channel=None, **kwargs): def test_stream_recognize_no_results(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -651,13 +615,6 @@ def test_stream_recognize_no_results(self): responses = [_make_streaming_response()] - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( channel=channel, response=responses, **kwargs) @@ -665,9 +622,9 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) sample = client.sample( stream=stream, encoding=Encoding.LINEAR16, @@ -677,6 +634,7 @@ def speech_api(channel=None, **kwargs): self.assertEqual(results, []) def test_speech_api_with_gax(self): + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -685,29 +643,25 @@ def test_speech_api_with_gax(self): client = self._make_one(credentials=creds, _use_grpc=True) client._credentials = creds - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI(channel=channel, **kwargs) host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - 
client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) + + low_level = client.speech_api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) - low_level = client.speech_api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - expected = ( - creds, _gax.DEFAULT_USER_AGENT, low_level.SERVICE_ADDRESS) - self.assertEqual(channel_args, [expected]) + assert msc.mock_calls[0] == mock.call( + creds, + _gax.DEFAULT_USER_AGENT, + low_level.SERVICE_ADDRESS, + ) def test_speech_api_without_gax(self): from google.cloud._http import Connection diff --git a/speech/tests/unit/test_helpers.py b/speech/tests/unit/test_helpers.py new file mode 100644 index 000000000000..e12507d6565a --- /dev/null +++ b/speech/tests/unit/test_helpers.py @@ -0,0 +1,66 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +from types import GeneratorType +import unittest + +import mock + + +class TestSpeechClient(unittest.TestCase): + + @staticmethod + def _make_one(): + import google.auth.credentials + from google.cloud.speech_v1 import SpeechClient + + credentials = mock.Mock(spec=google.auth.credentials.Credentials) + return SpeechClient(credentials=credentials) + + def test_inherited_method(self): + from google.cloud.speech_v1 import types + + client = self._make_one() + + config = types.RecognitionConfig(encoding='FLAC') + audio = types.RecognitionAudio(uri='http://foo.com/bar.wav') + with mock.patch.object(client, '_recognize') as recognize: + client.recognize(config, audio) + + # Assert that the underlying GAPIC method was called as expected. + recognize.assert_called_once_with(types.RecognizeRequest( + config=config, + audio=audio, + ), None) + + def test_streaming_recognize(self): + from google.cloud.speech_v1 import types + + client = self._make_one() + + config = types.StreamingRecognitionConfig() + requests = [types.StreamingRecognizeRequest(audio_content=b'...')] + with mock.patch.object(client, '_streaming_recognize') as sr: + client.streaming_recognize(config, requests) + + # Assert that we called streaming recognize with an iterable + # that evaluates to the correct format.
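The test above pins down the contract of `SpeechHelpers.streaming_recognize`: callers supply the streaming config once plus audio-only requests, and the helper injects the config as the first message on the wire. From the caller's side, a sketch with placeholder audio chunks:

    from google.cloud import speech

    client = speech.SpeechClient()

    streaming_config = speech.types.StreamingRecognitionConfig(
        config=speech.types.RecognitionConfig(
            encoding=speech.enums.RecognitionConfig.AudioEncoding.LINEAR16,
            sample_rate_hertz=16000,
            language_code='en-US',
        ),
    )
    # Only audio goes into the request iterable; the helper prepends
    # StreamingRecognizeRequest(streaming_config=...) automatically.
    requests = (
        speech.types.StreamingRecognizeRequest(audio_content=chunk)
        for chunk in (b'...', b'...')
    )
    for response in client.streaming_recognize(streaming_config, requests):
        for result in response.results:
            print(result.alternatives[0].transcript)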
+ _, args, _ = sr.mock_calls[0] + api_requests = args[0] + assert isinstance(api_requests, GeneratorType) + assert list(api_requests) == [ + types.StreamingRecognizeRequest(streaming_config=config), + requests[0], + ] diff --git a/storage/MANIFEST.in b/storage/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/storage/MANIFEST.in +++ b/storage/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/storage/README.rst b/storage/README.rst index 6d55686be9d0..d291fc389c23 100644 --- a/storage/README.rst +++ b/storage/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Storage - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage-client.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage/client.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -43,7 +43,7 @@ via direct download. See the ``google-cloud-python`` API `storage documentation`_ to learn how to connect to Cloud Storage using this Client Library. -.. _storage documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage-client.html +.. _storage documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage/client.html You need to create a Google Cloud Storage bucket to use this client library. Follow along with the `official Google Cloud Storage documentation`_ to learn @@ -64,6 +64,6 @@ how to create a bucket. blob2.upload_from_filename(filename='/local/path.txt') .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-storage.svg - :target: https://pypi.python.org/pypi/google-cloud-storage + :target: https://pypi.org/project/google-cloud-storage/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-storage.svg - :target: https://pypi.python.org/pypi/google-cloud-storage + :target: https://pypi.org/project/google-cloud-storage/ diff --git a/storage/google/cloud/storage/_helpers.py b/storage/google/cloud/storage/_helpers.py index 9e47c10269fc..88f9b8dc0ca7 100644 --- a/storage/google/cloud/storage/_helpers.py +++ b/storage/google/cloud/storage/_helpers.py @@ -67,11 +67,6 @@ def client(self): """Abstract getter for the object client.""" raise NotImplementedError - @property - def user_project(self): - """Abstract getter for the object user_project.""" - raise NotImplementedError - def _require_client(self, client): """Check client or verify over-ride. @@ -99,8 +94,6 @@ def reload(self, client=None): # Pass only '?projection=noAcl' here because 'acl' and related # are handled via custom endpoints. 
query_params = {'projection': 'noAcl'} - if self.user_project is not None: - query_params['userProject'] = self.user_project api_response = client._connection.api_request( method='GET', path=self.path, query_params=query_params, _target_object=self) @@ -147,14 +140,11 @@ def patch(self, client=None): client = self._require_client(client) # Pass '?projection=full' here because 'PATCH' documented not # to work properly w/ 'noAcl'. - query_params = {'projection': 'full'} - if self.user_project is not None: - query_params['userProject'] = self.user_project update_properties = {key: self._properties[key] for key in self._changes} api_response = client._connection.api_request( method='PATCH', path=self.path, data=update_properties, - query_params=query_params, _target_object=self) + query_params={'projection': 'full'}, _target_object=self) self._set_properties(api_response) diff --git a/core/google/cloud/credentials.py b/storage/google/cloud/storage/_signing.py similarity index 82% rename from core/google/cloud/credentials.py rename to storage/google/cloud/storage/_signing.py index 52cba9b22fcc..58e62ac1502d 100644 --- a/core/google/cloud/credentials.py +++ b/storage/google/cloud/storage/_signing.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,35 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""A simple wrapper around the OAuth2 credentials library.""" import base64 import datetime + import six -from six.moves.urllib.parse import urlencode -import google.auth import google.auth.credentials +from google.cloud import _helpers -from google.cloud._helpers import UTC -from google.cloud._helpers import _NOW -from google.cloud._helpers import _microseconds_from_datetime - - -def get_credentials(): - """Gets credentials implicitly from the current environment. - Uses :func:`google.auth.default()`. - - :rtype: :class:`google.auth.credentials.Credentials`, - :returns: A new credentials instance corresponding to the implicit - environment. - """ - credentials, _ = google.auth.default() - return credentials +NOW = datetime.datetime.utcnow # To be replaced by tests. -def _get_signed_query_params(credentials, expiration, string_to_sign): +def get_signed_query_params(credentials, expiration, string_to_sign): """Gets query parameters for creating a signed URL. :type credentials: :class:`google.auth.credentials.Signer` @@ -60,8 +45,9 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): signed payload. """ if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' - 'google-cloud-auth.html#setting-up-a-service-account') + auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' + 'core/auth.html?highlight=authentication#setting-up-' + 'a-service-account') raise AttributeError('you need a private key to sign credentials.' 'the credentials you are currently using %s ' 'just contains a token. see %s for more ' @@ -77,7 +63,7 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): } -def _get_expiration_seconds(expiration): +def get_expiration_seconds(expiration): """Convert 'expiration' to a number of seconds in the future. 
:type expiration: int, long, datetime.datetime, datetime.timedelta @@ -90,12 +76,12 @@ def _get_expiration_seconds(expiration): """ # If it's a timedelta, add it to `now` in UTC. if isinstance(expiration, datetime.timedelta): - now = _NOW().replace(tzinfo=UTC) + now = NOW().replace(tzinfo=_helpers.UTC) expiration = now + expiration # If it's a datetime, convert to a timestamp. if isinstance(expiration, datetime.datetime): - micros = _microseconds_from_datetime(expiration) + micros = _helpers._microseconds_from_datetime(expiration) expiration = micros // 10**6 if not isinstance(expiration, six.integer_types): @@ -175,7 +161,7 @@ def generate_signed_url(credentials, resource, expiration, :returns: A signed URL you can use to access the resource until expiration. """ - expiration = _get_expiration_seconds(expiration) + expiration = get_expiration_seconds(expiration) # Generate the string to sign. string_to_sign = '\n'.join([ @@ -183,12 +169,13 @@ def generate_signed_url(credentials, resource, expiration, content_md5 or '', content_type or '', str(expiration), - resource]) + resource, + ]) # Set the right query parameters. - query_params = _get_signed_query_params(credentials, - expiration, - string_to_sign) + query_params = get_signed_query_params( + credentials, expiration, string_to_sign) + if response_type is not None: query_params['response-content-type'] = response_type if response_disposition is not None: @@ -199,4 +186,4 @@ def generate_signed_url(credentials, resource, expiration, # Return the built URL. return '{endpoint}{resource}?{querystring}'.format( endpoint=api_access_endpoint, resource=resource, - querystring=urlencode(query_params)) + querystring=six.moves.urllib.parse.urlencode(query_params)) diff --git a/storage/google/cloud/storage/acl.py b/storage/google/cloud/storage/acl.py index 240662c4dc8d..c4525ea88735 100644 --- a/storage/google/cloud/storage/acl.py +++ b/storage/google/cloud/storage/acl.py @@ -198,7 +198,6 @@ class ACL(object): # as properties). 
reload_path = None save_path = None - user_project = None def __init__(self): self.entities = {} @@ -406,18 +405,10 @@ def reload(self, client=None): """ path = self.reload_path client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project self.entities.clear() - found = client._connection.api_request( - method='GET', - path=path, - query_params=query_params, - ) + found = client._connection.api_request(method='GET', path=path) self.loaded = True for entry in found.get('items', ()): self.add_entity(self.entity_from_dict(entry)) @@ -444,12 +435,8 @@ def _save(self, acl, predefined, client): acl = [] query_params[self._PREDEFINED_QUERY_PARAM] = predefined - if self.user_project is not None: - query_params['userProject'] = self.user_project - path = self.save_path client = self._require_client(client) - result = client._connection.api_request( method='PATCH', path=path, @@ -545,11 +532,6 @@ def save_path(self): """Compute the path for PATCH API requests for this ACL.""" return self.bucket.path - @property - def user_project(self): - """Compute the user project charged for API requests for this ACL.""" - return self.bucket.user_project - class DefaultObjectACL(BucketACL): """A class representing the default object ACL for a bucket.""" @@ -583,8 +565,3 @@ def reload_path(self): def save_path(self): """Compute the path for PATCH API requests for this ACL.""" return self.blob.path - - @property - def user_project(self): - """Compute the user project charged for API requests for this ACL.""" - return self.blob.user_project diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index aad2f47295aa..dfefc3c1a4fa 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -47,12 +47,12 @@ from google.cloud._helpers import _rfc3339_to_datetime from google.cloud._helpers import _to_bytes from google.cloud._helpers import _bytes_to_unicode -from google.cloud.credentials import generate_signed_url from google.cloud.exceptions import NotFound from google.cloud.exceptions import make_exception from google.cloud.iam import Policy from google.cloud.storage._helpers import _PropertyMixin from google.cloud.storage._helpers import _scalar_property +from google.cloud.storage._signing import generate_signed_url from google.cloud.storage.acl import ObjectACL @@ -113,7 +113,7 @@ class Blob(_PropertyMixin): :type encryption_key: bytes :param encryption_key: Optional 32 byte encryption key for customer-supplied encryption. - See https://cloud.google.com/storage/docs/encryption#customer-supplied + See https://cloud.google.com/storage/docs/encryption#customer-supplied. """ _chunk_size = None # Default value for each instance. @@ -222,16 +222,6 @@ def client(self): """The client bound to this blob.""" return self.bucket.client - @property - def user_project(self): - """Project ID used for API requests made via this blob. - - Derived from bucket's value. - - :rtype: str - """ - return self.bucket.user_project - @property def public_url(self): """The public URL for this blob's object. @@ -340,14 +330,10 @@ def exists(self, client=None): :returns: True if the blob exists in Cloud Storage. """ client = self._require_client(client) - # We only need the status code (200 or not) so we seek to - # minimize the returned payload. 
- query_params = {'fields': 'name'} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - try: + # We only need the status code (200 or not) so we seek to + # minimize the returned payload. + query_params = {'fields': 'name'} # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. client._connection.api_request( @@ -382,6 +368,7 @@ def _make_transport(self, client): :type client: :class:`~google.cloud.storage.client.Client` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. + :rtype transport: :class:`~google.auth.transport.requests.AuthorizedSession` :returns: The transport (with credentials) that will @@ -407,8 +394,6 @@ def _get_download_url(self): download_url = _DOWNLOAD_URL_TEMPLATE.format(path=self.path) if self.generation is not None: download_url += u'&generation={:d}'.format(self.generation) - if self.user_project is not None: - download_url += u'&userProject={}'.format(self.user_project) return download_url else: return self.media_link @@ -660,10 +645,6 @@ def _do_multipart_upload(self, client, stream, content_type, upload_url = _MULTIPART_URL_TEMPLATE.format( bucket_path=self.bucket.path) - - if self.user_project is not None: - upload_url += '&userProject={}'.format(self.user_project) - upload = MultipartUpload(upload_url, headers=headers) if num_retries is not None: @@ -736,10 +717,6 @@ def _initiate_resumable_upload(self, client, stream, content_type, upload_url = _RESUMABLE_URL_TEMPLATE.format( bucket_path=self.bucket.path) - - if self.user_project is not None: - upload_url += '&userProject={}'.format(self.user_project) - upload = ResumableUpload(upload_url, chunk_size, headers=headers) if num_retries is not None: @@ -1093,16 +1070,9 @@ def get_iam_policy(self, client=None): the ``getIamPolicy`` API request. """ client = self._require_client(client) - - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - info = client._connection.api_request( method='GET', path='%s/iam' % (self.path,), - query_params=query_params, _target_object=None) return Policy.from_api_repr(info) @@ -1125,18 +1095,11 @@ def set_iam_policy(self, policy, client=None): the ``setIamPolicy`` API request. """ client = self._require_client(client) - - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - resource = policy.to_api_repr() resource['resourceId'] = self.path info = client._connection.api_request( method='PUT', path='%s/iam' % (self.path,), - query_params=query_params, data=resource, _target_object=None) return Policy.from_api_repr(info) @@ -1160,17 +1123,12 @@ def test_iam_permissions(self, permissions, client=None): request. 
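The `_get_download_url` logic above prefers an already-populated `media_link` and otherwise builds the media URL from a template, appending the `generation` when one is set. Restated as a standalone sketch (the template literal mirrors the module's `_DOWNLOAD_URL_TEMPLATE`; the function name is illustrative):

_DOWNLOAD_URL_TEMPLATE = (
    u'https://www.googleapis.com/download/storage/v1{path}?alt=media')

def build_download_url(path, generation=None, media_link=None):
    if media_link is not None:
        return media_link
    download_url = _DOWNLOAD_URL_TEMPLATE.format(path=path)
    if generation is not None:
        download_url += u'&generation={:d}'.format(generation)
    return download_url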
""" client = self._require_client(client) - query_params = {'permissions': permissions} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - + query = {'permissions': permissions} path = '%s/iam/testPermissions' % (self.path,) resp = client._connection.api_request( method='GET', path=path, - query_params=query_params) - + query_params=query) return resp.get('permissions', []) def make_public(self, client=None): @@ -1200,22 +1158,13 @@ def compose(self, sources, client=None): """ if self.content_type is None: raise ValueError("Destination 'content_type' not set.") - client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - request = { 'sourceObjects': [{'name': source.name} for source in sources], 'destination': self._properties.copy(), } api_response = client._connection.api_request( - method='POST', - path=self.path + '/compose', - query_params=query_params, - data=request, + method='POST', path=self.path + '/compose', data=request, _target_object=self) self._set_properties(api_response) @@ -1247,20 +1196,14 @@ def rewrite(self, source, token=None, client=None): headers.update(_get_encryption_headers( source._encryption_key, source=True)) - query_params = {} - if token: - query_params['rewriteToken'] = token - - if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params = {'rewriteToken': token} + else: + query_params = {} api_response = client._connection.api_request( - method='POST', - path=source.path + '/rewriteTo' + self.path, - query_params=query_params, - data=self._properties, - headers=headers, + method='POST', path=source.path + '/rewriteTo' + self.path, + query_params=query_params, data=self._properties, headers=headers, _target_object=self) rewritten = int(api_response['totalBytesRewritten']) size = int(api_response['objectSize']) @@ -1291,22 +1234,13 @@ def update_storage_class(self, new_class, client=None): raise ValueError("Invalid storage class: %s" % (new_class,)) client = self._require_client(client) - - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - headers = _get_encryption_headers(self._encryption_key) headers.update(_get_encryption_headers( self._encryption_key, source=True)) api_response = client._connection.api_request( - method='POST', - path=self.path + '/rewriteTo' + self.path, - query_params=query_params, - data={'storageClass': new_class}, - headers=headers, + method='POST', path=self.path + '/rewriteTo' + self.path, + data={'storageClass': new_class}, headers=headers, _target_object=self) self._set_properties(api_response['resource']) @@ -1466,6 +1400,11 @@ def metadata(self): See https://cloud.google.com/storage/docs/json_api/v1/objects + :setter: Update arbitrary/application specific metadata for the + object. + :getter: Retrieve arbitrary/application specific metadata for + the object. + :rtype: dict or ``NoneType`` :returns: The metadata associated with the blob or ``None`` if the property is not set locally. 
diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py index e740cd4febc2..35ba59337490 100644 --- a/storage/google/cloud/storage/bucket.py +++ b/storage/google/cloud/storage/bucket.py @@ -34,6 +34,7 @@ from google.cloud.storage.acl import BucketACL from google.cloud.storage.acl import DefaultObjectACL from google.cloud.storage.blob import Blob +from google.cloud.storage.blob import _get_encryption_headers def _blobs_page_start(iterator, page, response): @@ -85,10 +86,6 @@ class Bucket(_PropertyMixin): :type name: str :param name: The name of the bucket. Bucket names must start and end with a number or letter. - - :type user_project: str - :param user_project: (Optional) the project ID to be billed for API - requests made via this instance. """ _MAX_OBJECTS_FOR_ITERATION = 256 @@ -112,13 +109,12 @@ class Bucket(_PropertyMixin): https://cloud.google.com/storage/docs/storage-classes """ - def __init__(self, client, name=None, user_project=None): + def __init__(self, client, name=None): name = _validate_name(name) super(Bucket, self).__init__(name=name) self._client = client self._acl = BucketACL(self) self._default_object_acl = DefaultObjectACL(self) - self._user_project = user_project def __repr__(self): return '<Bucket: %s>' % (self.name,) @@ -128,16 +124,6 @@ def client(self): """The client bound to this bucket.""" return self._client - @property - def user_project(self): - """Project ID to be billed for API requests made via this bucket. - - If unset, API requests are billed to the bucket owner. - - :rtype: str - """ - return self._user_project - def blob(self, blob_name, chunk_size=None, encryption_key=None): """Factory constructor for blob object. @@ -175,14 +161,10 @@ def exists(self, client=None): :returns: True if the bucket exists in Cloud Storage. """ client = self._require_client(client) - # We only need the status code (200 or not) so we seek to - # minimize the returned payload. - query_params = {'fields': 'name'} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - try: + # We only need the status code (200 or not) so we seek to + # minimize the returned payload. + query_params = {'fields': 'name'} # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. client._connection.api_request( @@ -208,9 +190,6 @@ def create(self, client=None): :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. """ - if self.user_project is not None: - raise ValueError("Cannot create bucket with 'user_project' set.") - client = self._require_client(client) query_params = {'project': client.project} properties = {key: self._properties[key] for key in self._changes} @@ -250,7 +229,7 @@ def path(self): return self.path_helper(self.name) - def get_blob(self, blob_name, client=None): + def get_blob(self, blob_name, client=None, encryption_key=None, **kwargs): """Get a blob object by name. This will return None if the blob doesn't exist: @@ -267,22 +246,27 @@ def get_blob(self, blob_name, client=None): :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. + :type encryption_key: bytes + :param encryption_key: + Optional 32 byte encryption key for customer-supplied encryption. + See + https://cloud.google.com/storage/docs/encryption#customer-supplied. 
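`get_blob` now threads `encryption_key` through to the metadata GET (see the `_get_encryption_headers` call below), so customer-supplied keys work for reads as well as writes. What those headers contain, per the CSEK tests later in this diff, is roughly the following; a simplified sketch (`csek_headers` is not the module's exact helper):

import base64
import hashlib

def csek_headers(key, source=False):
    """Headers for a 32-byte customer-supplied encryption key."""
    if key is None:
        return {}
    prefix = ('X-Goog-Copy-Source-Encryption-' if source
              else 'X-Goog-Encryption-')
    key_hash = hashlib.sha256(key).digest()
    return {
        prefix + 'Algorithm': 'AES256',
        prefix + 'Key': base64.b64encode(key).decode('ascii'),
        prefix + 'Key-Sha256': base64.b64encode(key_hash).decode('ascii'),
    }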
+ + :type kwargs: dict + :param kwargs: Keyword arguments to pass to the + :class:`~google.cloud.storage.blob.Blob` constructor. + :rtype: :class:`google.cloud.storage.blob.Blob` or None :returns: The blob object if it exists, otherwise None. """ client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - - blob = Blob(bucket=self, name=blob_name) + blob = Blob(bucket=self, name=blob_name, encryption_key=encryption_key, + **kwargs) try: + headers = _get_encryption_headers(encryption_key) response = client._connection.api_request( - method='GET', - path=blob.path, - query_params=query_params, - _target_object=blob) + method='GET', path=blob.path, _target_object=blob, + headers=headers) # NOTE: We assume response.get('name') matches `blob_name`. blob._set_properties(response) # NOTE: This will not fail immediately in a batch. However, when @@ -336,7 +320,7 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` in this bucket matching the arguments. """ - extra_params = {'projection': projection} + extra_params = {} if prefix is not None: extra_params['prefix'] = prefix @@ -347,12 +331,11 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, if versions is not None: extra_params['versions'] = versions + extra_params['projection'] = projection + if fields is not None: extra_params['fields'] = fields - if self.user_project is not None: - extra_params['userProject'] = self.user_project - client = self._require_client(client) path = self.path + '/o' iterator = HTTPIterator( @@ -392,11 +375,6 @@ def delete(self, force=False, client=None): contains more than 256 objects / blobs. """ client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - if force: blobs = list(self.list_blobs( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, @@ -418,10 +396,7 @@ def delete(self, force=False, client=None): # request has no response value (whether in a standard request or # in a batch request). client._connection.api_request( - method='DELETE', - path=self.path, - query_params=query_params, - _target_object=None) + method='DELETE', path=self.path, _target_object=None) def delete_blob(self, blob_name, client=None): """Deletes a blob from the current bucket. @@ -453,20 +428,12 @@ def delete_blob(self, blob_name, client=None): """ client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - blob_path = Blob.path_helper(self.path, blob_name) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). client._connection.api_request( - method='DELETE', - path=blob_path, - query_params=query_params, - _target_object=None) + method='DELETE', path=blob_path, _target_object=None) def delete_blobs(self, blobs, on_error=None, client=None): """Deletes a list of blobs from the current bucket. @@ -529,26 +496,14 @@ def copy_blob(self, blob, destination_bucket, new_name=None, :returns: The new Blob. 
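The `list_blobs` hunk above only reorders how the query dictionary is assembled; the resulting keys are unchanged. Flattened into a hypothetical helper for clarity (parameters elided from the hunk, such as `delimiter`, are omitted):

def list_blobs_params(projection='noAcl', prefix=None, versions=None,
                      fields=None):
    extra_params = {}
    if prefix is not None:
        extra_params['prefix'] = prefix
    if versions is not None:
        extra_params['versions'] = versions
    extra_params['projection'] = projection
    if fields is not None:
        extra_params['fields'] = fields
    return extra_params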
""" client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - if new_name is None: new_name = blob.name - new_blob = Blob(bucket=destination_bucket, name=new_name) api_path = blob.path + '/copyTo' + new_blob.path copy_result = client._connection.api_request( - method='POST', - path=api_path, - query_params=query_params, - _target_object=new_blob, - ) - + method='POST', path=api_path, _target_object=new_blob) if not preserve_acl: new_blob.acl.save(acl={}, client=client) - new_blob._set_properties(copy_result) return new_blob @@ -857,40 +812,10 @@ def versioning_enabled(self, value): details. :type value: convertible to boolean - :param value: should versioning be enabled for the bucket? + :param value: should versioning be anabled for the bucket? """ self._patch_property('versioning', {'enabled': bool(value)}) - @property - def requester_pays(self): - """Does the requester pay for API requests for this bucket? - - .. note:: - - No public docs exist yet for the "requester pays" feature. - - :setter: Update whether requester pays for this bucket. - :getter: Query whether requester pays for this bucket. - - :rtype: bool - :returns: True if requester pays for API requests for the bucket, - else False. - """ - versioning = self._properties.get('billing', {}) - return versioning.get('requesterPays', False) - - @requester_pays.setter - def requester_pays(self, value): - """Update whether requester pays for API requests for this bucket. - - See https://cloud.google.com/storage/docs/<DOCS-MISSING> for - details. - - :type value: convertible to boolean - :param value: should requester pay for API requests for the bucket? - """ - self._patch_property('billing', {'requesterPays': bool(value)}) - def configure_website(self, main_page_suffix=None, not_found_page=None): """Configure website-related properties. @@ -956,15 +881,9 @@ def get_iam_policy(self, client=None): the ``getIamPolicy`` API request. """ client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - info = client._connection.api_request( method='GET', path='%s/iam' % (self.path,), - query_params=query_params, _target_object=None) return Policy.from_api_repr(info) @@ -987,17 +906,11 @@ def set_iam_policy(self, policy, client=None): the ``setIamPolicy`` API request. """ client = self._require_client(client) - query_params = {} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - resource = policy.to_api_repr() resource['resourceId'] = self.path info = client._connection.api_request( method='PUT', path='%s/iam' % (self.path,), - query_params=query_params, data=resource, _target_object=None) return Policy.from_api_repr(info) @@ -1021,16 +934,12 @@ def test_iam_permissions(self, permissions, client=None): request. 
""" client = self._require_client(client) - query_params = {'permissions': permissions} - - if self.user_project is not None: - query_params['userProject'] = self.user_project - + query = {'permissions': permissions} path = '%s/iam/testPermissions' % (self.path,) resp = client._connection.api_request( method='GET', path=path, - query_params=query_params) + query_params=query) return resp.get('permissions', []) def make_public(self, recursive=False, future=False, client=None): @@ -1120,8 +1029,9 @@ def generate_upload_policy( credentials = client._base_connection.credentials if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' - 'google-cloud-auth.html#setting-up-a-service-account') + auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' + 'core/auth.html?highlight=authentication#setting-up-' + 'a-service-account') raise AttributeError( 'you need a private key to sign credentials.' 'the credentials you are currently using %s ' diff --git a/storage/google/cloud/storage/client.py b/storage/google/cloud/storage/client.py index 51cad4d70c54..93785e05269f 100644 --- a/storage/google/cloud/storage/client.py +++ b/storage/google/cloud/storage/client.py @@ -194,7 +194,7 @@ def lookup_bucket(self, bucket_name): except NotFound: return None - def create_bucket(self, bucket_name, requester_pays=None): + def create_bucket(self, bucket_name): """Create a new bucket. For example: @@ -211,17 +211,10 @@ def create_bucket(self, bucket_name, requester_pays=None): :type bucket_name: str :param bucket_name: The bucket name to create. - :type requester_pays: bool - :param requester_pays: - (Optional) Whether requester pays for API requests for this - bucket and its blobs. - :rtype: :class:`google.cloud.storage.bucket.Bucket` :returns: The newly created bucket. """ bucket = Bucket(self, name=bucket_name) - if requester_pays is not None: - bucket.requester_pays = requester_pays bucket.create(client=self) return bucket diff --git a/storage/nox.py b/storage/nox.py index 3f33119f93a0..18ccf81aaff2 100644 --- a/storage/nox.py +++ b/storage/nox.py @@ -30,16 +30,25 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. session.run( - 'py.test', '--quiet', - '--cov=google.cloud.storage', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + 'py.test', + '--quiet', + '--cov=google.cloud.storage', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', 'tests/unit', + *session.posargs ) @@ -50,11 +59,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. 
+ session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -67,21 +79,32 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/storage') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/storage/pylint.config.py b/storage/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/storage/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/storage/setup.py b/storage/setup.py index e261f6402c02..8d11055fac77 100644 --- a/storage/setup.py +++ b/storage/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,15 +51,15 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.1, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'google-auth >= 1.0.0', - 'google-resumable-media >= 0.1.1', + 'google-resumable-media >= 0.2.1', 'requests >= 2.0.0', ] setup( name='google-cloud-storage', - version='1.1.1', + version='1.2.0', description='Python Client for Google Cloud Storage', long_description=README, namespace_packages=[ diff --git a/storage/tests/system.py b/storage/tests/system.py index 06f50b26128b..afab659882bf 100644 --- a/storage/tests/system.py +++ b/storage/tests/system.py @@ -30,8 +30,6 @@ HTTP = httplib2.Http() -REQUESTER_PAYS_ENABLED = False # query from environment? 
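In the `nox.py` hunks above, the behavioral change worth calling out is `session.skip(...)` replacing a bare `return` when credentials are missing: the session is then reported as skipped instead of counting as a silent pass. A stripped-down session showing the pattern under the nox API used here (the real sessions also parameterize the interpreter and install `LOCAL_DEPS`, omitted in this sketch):

import os

import nox

@nox.session
def system_tests(session):
    # Sanity check: only run system tests when credentials are present.
    if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
        session.skip('Credentials must be set via environment variable.')
    session.install('mock', 'pytest')
    session.install('-e', '.')
    session.run('py.test', '--quiet', 'tests/system.py')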
- def _bad_copy(bad_request): """Predicate: pass only exceptions for a failed copyTo.""" @@ -101,15 +99,6 @@ def test_create_bucket(self): self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) - @unittest.skipUnless(REQUESTER_PAYS_ENABLED, "requesterPays not enabled") - def test_create_bucket_with_requester_pays(self): - new_bucket_name = 'w-requester-pays' + unique_resource_id('-') - created = Config.CLIENT.create_bucket( - new_bucket_name, requester_pays=True) - self.case_buckets_to_delete.append(new_bucket_name) - self.assertEqual(created.name, new_bucket_name) - self.assertTrue(created.requester_pays) - def test_list_buckets(self): buckets_to_create = [ 'new' + unique_resource_id(), diff --git a/storage/tests/unit/test__helpers.py b/storage/tests/unit/test__helpers.py index 21883e2c4ac9..89967f3a0db0 100644 --- a/storage/tests/unit/test__helpers.py +++ b/storage/tests/unit/test__helpers.py @@ -26,7 +26,7 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def _derivedClass(self, path=None, user_project=None): + def _derivedClass(self, path=None): class Derived(self._get_target_class()): @@ -36,67 +36,30 @@ class Derived(self._get_target_class()): def path(self): return path - @property - def user_project(self): - return user_project - return Derived def test_path_is_abstract(self): mixin = self._make_one() - with self.assertRaises(NotImplementedError): - mixin.path + self.assertRaises(NotImplementedError, lambda: mixin.path) def test_client_is_abstract(self): mixin = self._make_one() - with self.assertRaises(NotImplementedError): - mixin.client - - def test_user_project_is_abstract(self): - mixin = self._make_one() - with self.assertRaises(NotImplementedError): - mixin.user_project + self.assertRaises(NotImplementedError, lambda: mixin.client) def test_reload(self): connection = _Connection({'foo': 'Foo'}) client = _Client(connection) derived = self._derivedClass('/path')() - # Make sure changes is not a set instance before calling reload - # (which will clear / replace it with an empty set), checked below. - derived._changes = object() - derived.reload(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/path', - 'query_params': {'projection': 'noAcl'}, - '_target_object': derived, - }) - self.assertEqual(derived._changes, set()) - - def test_reload_w_user_project(self): - user_project = 'user-project-123' - connection = _Connection({'foo': 'Foo'}) - client = _Client(connection) - derived = self._derivedClass('/path', user_project)() - # Make sure changes is not a set instance before calling reload - # (which will clear / replace it with an empty set), checked below. + # Make sure changes is not a set, so we can observe a change. derived._changes = object() derived.reload(client=client) self.assertEqual(derived._properties, {'foo': 'Foo'}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/path', - 'query_params': { - 'projection': 'noAcl', - 'userProject': user_project, - }, - '_target_object': derived, - }) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + # Make sure changes get reset by reload. 
self.assertEqual(derived._changes, set()) def test__set_properties(self): @@ -124,42 +87,11 @@ def test_patch(self): self.assertEqual(derived._properties, {'foo': 'Foo'}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/path', - 'query_params': {'projection': 'full'}, - # Since changes does not include `baz`, we don't see it sent. - 'data': {'bar': BAR}, - '_target_object': derived, - }) - # Make sure changes get reset by patch(). - self.assertEqual(derived._changes, set()) - - def test_patch_w_user_project(self): - user_project = 'user-project-123' - connection = _Connection({'foo': 'Foo'}) - client = _Client(connection) - derived = self._derivedClass('/path', user_project)() - # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {'bar': BAR, 'baz': BAZ} - derived._changes = set(['bar']) # Ignore baz. - derived.patch(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/path', - 'query_params': { - 'projection': 'full', - 'userProject': user_project, - }, - # Since changes does not include `baz`, we don't see it sent. - 'data': {'bar': BAR}, - '_target_object': derived, - }) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + # Since changes does not include `baz`, we don't see it sent. + self.assertEqual(kw[0]['data'], {'bar': BAR}) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) diff --git a/storage/tests/unit/test__signing.py b/storage/tests/unit/test__signing.py new file mode 100644 index 000000000000..1e2aabb9d25e --- /dev/null +++ b/storage/tests/unit/test__signing.py @@ -0,0 +1,222 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +import calendar +import datetime +import time +import unittest + +import mock +import six +from six.moves import urllib_parse + + +class Test_get_expiration_seconds(unittest.TestCase): + + @staticmethod + def _call_fut(expiration): + from google.cloud.storage._signing import get_expiration_seconds + + return get_expiration_seconds(expiration) + + @staticmethod + def _utc_seconds(when): + return int(calendar.timegm(when.timetuple())) + + def test_w_invalid(self): + self.assertRaises(TypeError, self._call_fut, object()) + self.assertRaises(TypeError, self._call_fut, None) + + def test_w_int(self): + self.assertEqual(self._call_fut(123), 123) + + def test_w_long(self): + if six.PY3: + raise unittest.SkipTest('No long on Python 3') + + self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 + + def test_w_naive_datetime(self): + expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(expiration_no_tz) + self.assertEqual(self._call_fut(expiration_no_tz), utc_seconds) + + def test_w_utc_datetime(self): + from google.cloud._helpers import UTC + + expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) + utc_seconds = self._utc_seconds(expiration_utc) + self.assertEqual(self._call_fut(expiration_utc), utc_seconds) + + def test_w_other_zone_datetime(self): + from google.cloud._helpers import _UTC + + class CET(_UTC): + _tzname = 'CET' + _utcoffset = datetime.timedelta(hours=1) + + zone = CET() + expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) + utc_seconds = self._utc_seconds(expiration_other) + cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC + self.assertEqual(self._call_fut(expiration_other), cet_seconds) + + def test_w_timedelta_seconds(self): + dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(dummy_utcnow) + expiration_as_delta = datetime.timedelta(seconds=10) + + patch = mock.patch( + 'google.cloud.storage._signing.NOW', + return_value=dummy_utcnow) + with patch as utcnow: + result = self._call_fut(expiration_as_delta) + + self.assertEqual(result, utc_seconds + 10) + utcnow.assert_called_once_with() + + def test_w_timedelta_days(self): + dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(dummy_utcnow) + expiration_as_delta = datetime.timedelta(days=1) + + patch = mock.patch( + 'google.cloud.storage._signing.NOW', + return_value=dummy_utcnow) + with patch as utcnow: + result = self._call_fut(expiration_as_delta) + + self.assertEqual(result, utc_seconds + 86400) + utcnow.assert_called_once_with() + + +class Test_get_signed_query_params(unittest.TestCase): + + @staticmethod + def _call_fut(credentials, expiration, string_to_sign): + from google.cloud.storage._signing import get_signed_query_params + + return get_signed_query_params( + credentials, expiration, string_to_sign) + + def test_it(self): + sig_bytes = b'DEADBEEF' + account_name = mock.sentinel.service_account_email + credentials = _make_credentials( + signing=True, signer_email=account_name) + credentials.sign_bytes.return_value = sig_bytes + expiration = 100 + string_to_sign = 'dummy_signature' + result = self._call_fut( + credentials, expiration, string_to_sign) + + expected = { + 'GoogleAccessId': account_name, + 'Expires': str(expiration), + 'Signature': base64.b64encode(sig_bytes), + } + self.assertEqual(result, expected) + credentials.sign_bytes.assert_called_once_with(string_to_sign) + + +class 
Test_generate_signed_url(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.storage._signing import generate_signed_url + + return generate_signed_url(*args, **kwargs) + + def _generate_helper(self, response_type=None, response_disposition=None, + generation=None): + endpoint = 'http://api.example.com' + resource = '/name/path' + credentials = _make_credentials( + signing=True, signer_email='service@example.com') + credentials.sign_bytes.return_value = b'DEADBEEF' + signed = base64.b64encode(credentials.sign_bytes.return_value) + signed = signed.decode('ascii') + + expiration = 1000 + url = self._call_fut( + credentials, + resource, + expiration, + api_access_endpoint=endpoint, + response_type=response_type, + response_disposition=response_disposition, + generation=generation, + ) + + # Check the mock was called. + string_to_sign = '\n'.join([ + 'GET', + '', + '', + str(expiration), + resource, + ]) + credentials.sign_bytes.assert_called_once_with(string_to_sign) + + scheme, netloc, path, qs, frag = urllib_parse.urlsplit(url) + self.assertEqual(scheme, 'http') + self.assertEqual(netloc, 'api.example.com') + self.assertEqual(path, resource) + self.assertEqual(frag, '') + + # Check the URL parameters. + params = urllib_parse.parse_qs(qs) + expected_params = { + 'GoogleAccessId': [credentials.signer_email], + 'Expires': [str(expiration)], + 'Signature': [signed], + } + if response_type is not None: + expected_params['response-content-type'] = [response_type] + if response_disposition is not None: + expected_params['response-content-disposition'] = [ + response_disposition] + if generation is not None: + expected_params['generation'] = [generation] + self.assertEqual(params, expected_params) + + def test_w_expiration_int(self): + self._generate_helper() + + def test_w_custom_fields(self): + response_type = 'text/plain' + response_disposition = 'attachment; filename=blob.png' + generation = '123' + self._generate_helper(response_type=response_type, + response_disposition=response_disposition, + generation=generation) + + def test_with_google_credentials(self): + resource = '/name/path' + credentials = _make_credentials() + expiration = int(time.time() + 5) + self.assertRaises(AttributeError, self._call_fut, credentials, + resource=resource, expiration=expiration) + + +def _make_credentials(signing=False, signer_email=None): + import google.auth.credentials + + if signing: + credentials = mock.Mock(spec=google.auth.credentials.Signing) + credentials.signer_email = signer_email + return credentials + else: + return mock.Mock(spec=google.auth.credentials.Credentials) diff --git a/storage/tests/unit/test_acl.py b/storage/tests/unit/test_acl.py index 4e4018ae7c8c..1159c8c1f2aa 100644 --- a/storage/tests/unit/test_acl.py +++ b/storage/tests/unit/test_acl.py @@ -532,11 +532,8 @@ def test_reload_missing(self): self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/testing/acl', - 'query_params': {}, - }) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/testing/acl') def test_reload_empty_result_clears_local(self): ROLE = 'role' @@ -546,41 +543,29 @@ def test_reload_empty_result_clears_local(self): acl.reload_path = '/testing/acl' acl.loaded = True acl.entity('allUsers', ROLE) - acl.reload(client=client) - self.assertTrue(acl.loaded) self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - 
self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/testing/acl', - 'query_params': {}, - }) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/testing/acl') - def test_reload_nonempty_result_w_user_project(self): + def test_reload_nonempty_result(self): ROLE = 'role' - USER_PROJECT = 'user-project-123' connection = _Connection( {'items': [{'entity': 'allUsers', 'role': ROLE}]}) client = _Client(connection) acl = self._make_one() acl.reload_path = '/testing/acl' acl.loaded = True - acl.user_project = USER_PROJECT - acl.reload(client=client) - self.assertTrue(acl.loaded) self.assertEqual(list(acl), [{'entity': 'allUsers', 'role': ROLE}]) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/testing/acl', - 'query_params': {'userProject': USER_PROJECT}, - }) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/testing/acl') def test_save_none_set_none_passed(self): connection = _Connection() @@ -621,43 +606,30 @@ def test_save_no_acl(self): self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': {'projection': 'full'}, - 'data': {'acl': AFTER}, - }) - - def test_save_w_acl_w_user_project(self): + self.assertEqual(kw[0]['data'], {'acl': AFTER}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_save_w_acl(self): ROLE1 = 'role1' ROLE2 = 'role2' STICKY = {'entity': 'allUsers', 'role': ROLE2} - USER_PROJECT = 'user-project-123' new_acl = [{'entity': 'allUsers', 'role': ROLE1}] connection = _Connection({'acl': [STICKY] + new_acl}) client = _Client(connection) acl = self._make_one() acl.save_path = '/testing' acl.loaded = True - acl.user_project = USER_PROJECT - acl.save(new_acl, client=client) - entries = list(acl) self.assertEqual(len(entries), 2) self.assertTrue(STICKY in entries) self.assertTrue(new_acl[0] in entries) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'userProject': USER_PROJECT, - }, - 'data': {'acl': new_acl}, - }) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/testing') + self.assertEqual(kw[0]['data'], {'acl': new_acl}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) def test_save_prefefined_invalid(self): connection = _Connection() @@ -680,15 +652,11 @@ def test_save_predefined_valid(self): self.assertEqual(len(entries), 0) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'predefinedAcl': PREDEFINED, - }, - 'data': {'acl': []}, - }) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/testing') + self.assertEqual(kw[0]['data'], {'acl': []}) + self.assertEqual(kw[0]['query_params'], + {'projection': 'full', 'predefinedAcl': PREDEFINED}) def test_save_predefined_w_XML_alias(self): PREDEFINED_XML = 'project-private' @@ -703,15 +671,12 @@ def test_save_predefined_w_XML_alias(self): self.assertEqual(len(entries), 0) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'predefinedAcl': PREDEFINED_JSON, - }, - 'data': {'acl': []}, - }) + 
self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/testing') + self.assertEqual(kw[0]['data'], {'acl': []}) + self.assertEqual(kw[0]['query_params'], + {'projection': 'full', + 'predefinedAcl': PREDEFINED_JSON}) def test_save_predefined_valid_w_alternate_query_param(self): # Cover case where subclass overrides _PREDEFINED_QUERY_PARAM @@ -727,15 +692,11 @@ def test_save_predefined_valid_w_alternate_query_param(self): self.assertEqual(len(entries), 0) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'alternate': PREDEFINED, - }, - 'data': {'acl': []}, - }) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/testing') + self.assertEqual(kw[0]['data'], {'acl': []}) + self.assertEqual(kw[0]['query_params'], + {'projection': 'full', 'alternate': PREDEFINED}) def test_clear(self): ROLE1 = 'role1' @@ -751,12 +712,10 @@ def test_clear(self): self.assertEqual(list(acl), [STICKY]) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': {'projection': 'full'}, - 'data': {'acl': []}, - }) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/testing') + self.assertEqual(kw[0]['data'], {'acl': []}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) class Test_BucketACL(unittest.TestCase): @@ -780,15 +739,6 @@ def test_ctor(self): self.assertEqual(acl.reload_path, '/b/%s/acl' % NAME) self.assertEqual(acl.save_path, '/b/%s' % NAME) - def test_user_project(self): - NAME = 'name' - USER_PROJECT = 'user-project-123' - bucket = _Bucket(NAME) - acl = self._make_one(bucket) - self.assertIsNone(acl.user_project) - bucket.user_project = USER_PROJECT - self.assertEqual(acl.user_project, USER_PROJECT) - class Test_DefaultObjectACL(unittest.TestCase): @@ -835,22 +785,9 @@ def test_ctor(self): self.assertEqual(acl.reload_path, '/b/%s/o/%s/acl' % (NAME, BLOB_NAME)) self.assertEqual(acl.save_path, '/b/%s/o/%s' % (NAME, BLOB_NAME)) - def test_user_project(self): - NAME = 'name' - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - bucket = _Bucket(NAME) - blob = _Blob(bucket, BLOB_NAME) - acl = self._make_one(blob) - self.assertIsNone(acl.user_project) - blob.user_project = USER_PROJECT - self.assertEqual(acl.user_project, USER_PROJECT) - class _Blob(object): - user_project = None - def __init__(self, bucket, blob): self.bucket = bucket self.blob = blob @@ -862,8 +799,6 @@ def path(self): class _Bucket(object): - user_project = None - def __init__(self, name): self.name = name diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index 1c31e9ea1b0f..e2227adbd94a 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -141,19 +141,6 @@ def test_path_with_non_ascii(self): blob = self._make_one(blob_name, bucket=bucket) self.assertEqual(blob.path, '/b/name/o/Caf%C3%A9') - def test_client(self): - blob_name = 'BLOB' - bucket = _Bucket() - blob = self._make_one(blob_name, bucket=bucket) - self.assertIs(blob.client, bucket.client) - - def test_user_project(self): - user_project = 'user-project-123' - blob_name = 'BLOB' - bucket = _Bucket(user_project=user_project) - blob = self._make_one(blob_name, bucket=bucket) - self.assertEqual(blob.user_project, user_project) - def test_public_url(self): BLOB_NAME = 'blob-name' bucket = _Bucket() @@ -317,31 +304,16 @@ 
def test_exists_miss(self): bucket = _Bucket(client) blob = self._make_one(NONESUCH, bucket=bucket) self.assertFalse(blob.exists()) - self.assertEqual(len(connection._requested), 1) - self.assertEqual(connection._requested[0], { - 'method': 'GET', - 'path': '/b/name/o/{}'.format(NONESUCH), - 'query_params': {'fields': 'name'}, - '_target_object': None, - }) - def test_exists_hit_w_user_project(self): + def test_exists_hit(self): BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' found_response = ({'status': http_client.OK}, b'') connection = _Connection(found_response) client = _Client(connection) - bucket = _Bucket(client, user_project=USER_PROJECT) + bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 self.assertTrue(blob.exists()) - self.assertEqual(len(connection._requested), 1) - self.assertEqual(connection._requested[0], { - 'method': 'GET', - 'path': '/b/name/o/{}'.format(BLOB_NAME), - 'query_params': {'fields': 'name', 'userProject': USER_PROJECT}, - '_target_object': None, - }) def test_delete(self): BLOB_NAME = 'blob-name' @@ -377,7 +349,7 @@ def test__get_download_url_with_media_link(self): def test__get_download_url_on_the_fly(self): blob_name = 'bzzz-fly.txt' - bucket = _Bucket(name='buhkit') + bucket = mock.Mock(path='/b/buhkit', spec=['path']) blob = self._make_one(blob_name, bucket=bucket) self.assertIsNone(blob.media_link) @@ -389,7 +361,7 @@ def test__get_download_url_on_the_fly(self): def test__get_download_url_on_the_fly_with_generation(self): blob_name = 'pretend.txt' - bucket = _Bucket(name='fictional') + bucket = mock.Mock(path='/b/fictional', spec=['path']) blob = self._make_one(blob_name, bucket=bucket) generation = 1493058489532987 # Set the media link on the blob @@ -402,20 +374,6 @@ def test__get_download_url_on_the_fly_with_generation(self): 'fictional/o/pretend.txt?alt=media&generation=1493058489532987') self.assertEqual(download_url, expected_url) - def test__get_download_url_on_the_fly_with_user_project(self): - blob_name = 'pretend.txt' - user_project = 'user-project-123' - bucket = _Bucket(name='fictional', user_project=user_project) - blob = self._make_one(blob_name, bucket=bucket) - - self.assertIsNone(blob.media_link) - download_url = blob._get_download_url() - expected_url = ( - 'https://www.googleapis.com/download/storage/v1/b/' - 'fictional/o/pretend.txt?alt=media&userProject={}'.format( - user_project)) - self.assertEqual(download_url, expected_url) - @staticmethod def _mock_requests_response(status_code, headers, content=b''): return mock.Mock( @@ -807,8 +765,8 @@ def _mock_transport(self, status_code, headers, content=b''): return fake_transport def _do_multipart_success(self, mock_get_boundary, size=None, - num_retries=None, user_project=None): - bucket = _Bucket(name='w00t', user_project=user_project) + num_retries=None): + bucket = mock.Mock(path='/b/w00t', spec=[u'path']) blob = self._make_one(u'blob-name', bucket=bucket) self.assertIsNone(blob.chunk_size) @@ -817,7 +775,7 @@ def _do_multipart_success(self, mock_get_boundary, size=None, blob._make_transport = mock.Mock(return_value=fake_transport, spec=[]) # Create some mock arguments. 
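The `mock.sentinel.mock` to `mock.sentinel.client` rename in the hunks below is cosmetic, but it leans on a handy property of sentinels: each attribute name is a unique singleton, so identity assertions need no real object. For instance:

import mock

client = mock.sentinel.client
assert client is mock.sentinel.client  # same name, same singleton
assert client is not mock.sentinel.mock  # different names, different objects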
- client = mock.sentinel.mock + client = mock.sentinel.client data = b'data here hear hier' stream = io.BytesIO(data) content_type = u'application/xml' @@ -840,8 +798,6 @@ def _do_multipart_success(self, mock_get_boundary, size=None, 'https://www.googleapis.com/upload/storage/v1' + bucket.path + '/o?uploadType=multipart') - if user_project is not None: - upload_url += '&userProject={}'.format(user_project) payload = ( b'--==0==\r\n' + b'content-type: application/json; charset=UTF-8\r\n\r\n' + @@ -864,13 +820,6 @@ def test__do_multipart_upload_no_size(self, mock_get_boundary): def test__do_multipart_upload_with_size(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, size=10) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') - def test__do_multipart_upload_with_user_project(self, mock_get_boundary): - user_project = 'user-project-123' - self._do_multipart_success( - mock_get_boundary, user_project=user_project) - @mock.patch(u'google.resumable_media._upload.get_boundary', return_value=b'==0==') def test__do_multipart_upload_with_retry(self, mock_get_boundary): @@ -892,12 +841,11 @@ def test__do_multipart_upload_bad_size(self): 'was specified but the file-like object only had', exc_contents) self.assertEqual(stream.tell(), len(data)) - def _initiate_resumable_helper( - self, size=None, extra_headers=None, chunk_size=None, - num_retries=None, user_project=None): + def _initiate_resumable_helper(self, size=None, extra_headers=None, + chunk_size=None, num_retries=None): from google.resumable_media.requests import ResumableUpload - bucket = _Bucket(name='whammy', user_project=user_project) + bucket = mock.Mock(path='/b/whammy', spec=[u'path']) blob = self._make_one(u'blob-name', bucket=bucket) blob.metadata = {'rook': 'takes knight'} blob.chunk_size = 3 * blob._CHUNK_SIZE_MULTIPLE @@ -917,7 +865,7 @@ def _initiate_resumable_helper( blob._make_transport = mock.Mock(return_value=fake_transport, spec=[]) # Create some mock arguments and call the method under test. 
- client = mock.sentinel.mock + client = mock.sentinel.client data = b'hello hallo halo hi-low' stream = io.BytesIO(data) content_type = u'text/plain' @@ -931,8 +879,6 @@ def _initiate_resumable_helper( 'https://www.googleapis.com/upload/storage/v1' + bucket.path + '/o?uploadType=resumable') - if user_project is not None: - upload_url += '&userProject={}'.format(user_project) self.assertEqual(upload.upload_url, upload_url) if extra_headers is None: self.assertEqual(upload._headers, {}) @@ -985,10 +931,6 @@ def test__initiate_resumable_upload_no_size(self): def test__initiate_resumable_upload_with_size(self): self._initiate_resumable_helper(size=10000) - def test__initiate_resumable_upload_with_user_project(self): - user_project = 'user-project-123' - self._initiate_resumable_helper(user_project=user_project) - def test__initiate_resumable_upload_with_chunk_size(self): one_mb = 1048576 self._initiate_resumable_helper(chunk_size=one_mb) @@ -1012,7 +954,8 @@ def _make_resumable_transport(self, headers1, headers2, resumable_media.PERMANENT_REDIRECT, headers2) json_body = '{{"size": "{:d}"}}'.format(total_bytes) fake_response3 = self._mock_requests_response( - http_client.OK, headers3, content=json_body) + http_client.OK, headers3, + content=json_body.encode('utf-8')) responses = [fake_response1, fake_response2, fake_response3] fake_transport.request.side_effect = responses @@ -1068,7 +1011,7 @@ def _do_resumable_upload_call2(blob, content_type, data, 'PUT', resumable_url, data=payload, headers=expected_headers) def _do_resumable_helper(self, use_size=False, num_retries=None): - bucket = _Bucket(name='yesterday') + bucket = mock.Mock(path='/b/yesterday', spec=[u'path']) blob = self._make_one(u'blob-name', bucket=bucket) blob.chunk_size = blob._CHUNK_SIZE_MULTIPLE self.assertIsNotNone(blob.chunk_size) @@ -1090,7 +1033,7 @@ def _do_resumable_helper(self, use_size=False, num_retries=None): blob._make_transport = mock.Mock(return_value=fake_transport, spec=[]) # Create some mock arguments and call the method under test. - client = mock.sentinel.mock + client = mock.sentinel.client stream = io.BytesIO(data) content_type = u'text/html' response = blob._do_resumable_upload( @@ -1219,7 +1162,7 @@ def test_upload_from_file_failure(self): from google.resumable_media import InvalidResponse from google.cloud import exceptions - message = u'Someone is already in this spot.' + message = b'Someone is already in this spot.' response = mock.Mock( content=message, status_code=http_client.CONFLICT, spec=[u'content', u'status_code']) @@ -1228,7 +1171,7 @@ def test_upload_from_file_failure(self): with self.assertRaises(exceptions.Conflict) as exc_info: self._upload_from_file_helper(side_effect=side_effect) - self.assertEqual(exc_info.exception.message, message) + self.assertEqual(exc_info.exception.message, message.decode('utf-8')) self.assertEqual(exc_info.exception.errors, []) def _do_upload_mock_call_helper(self, blob, client, content_type, size): @@ -1311,7 +1254,7 @@ def test_upload_from_string_w_text(self): def _create_resumable_upload_session_helper(self, origin=None, side_effect=None): - bucket = _Bucket(name='alex-trebek') + bucket = mock.Mock(path='/b/alex-trebek', spec=[u'path']) blob = self._make_one('blob-name', bucket=bucket) chunk_size = 99 * blob._CHUNK_SIZE_MULTIPLE blob.chunk_size = chunk_size @@ -1328,7 +1271,7 @@ def _create_resumable_upload_session_helper(self, origin=None, # Create some mock arguments and call the method under test. 
content_type = u'text/plain' size = 10000 - client = mock.sentinel.mock + client = mock.sentinel.client new_url = blob.create_resumable_upload_session( content_type=content_type, size=size, origin=origin, client=client) @@ -1365,7 +1308,7 @@ def test_create_resumable_upload_session_with_failure(self): from google.resumable_media import InvalidResponse from google.cloud import exceptions - message = u'5-oh-3 woe is me.' + message = b'5-oh-3 woe is me.' response = mock.Mock( content=message, status_code=http_client.SERVICE_UNAVAILABLE, spec=[u'content', u'status_code']) @@ -1375,7 +1318,7 @@ def test_create_resumable_upload_session_with_failure(self): self._create_resumable_upload_session_helper( side_effect=side_effect) - self.assertEqual(exc_info.exception.message, message) + self.assertEqual(exc_info.exception.message, message.decode('utf-8')) self.assertEqual(exc_info.exception.errors, []) def test_get_iam_policy(self): @@ -1422,49 +1365,8 @@ def test_get_iam_policy(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '%s/iam' % (PATH,), - 'query_params': {}, - '_target_object': None, - }) - - def test_get_iam_policy_w_user_project(self): - from google.cloud.iam import Policy - - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - ETAG = 'DEADBEEF' - VERSION = 17 - RETURNED = { - 'resourceId': PATH, - 'etag': ETAG, - 'version': VERSION, - 'bindings': [], - } - after = ({'status': http_client.OK}, RETURNED) - EXPECTED = {} - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) - - policy = blob.get_iam_policy() - - self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED['etag']) - self.assertEqual(policy.version, RETURNED['version']) - self.assertEqual(dict(policy), EXPECTED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '%s/iam' % (PATH,), - 'query_params': {'userProject': USER_PROJECT}, - '_target_object': None, - }) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) def test_set_iam_policy(self): import operator @@ -1513,7 +1415,6 @@ def test_set_iam_policy(self): self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PUT') self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {}) sent = kw[0]['data'] self.assertEqual(sent['resourceId'], PATH) self.assertEqual(len(sent['bindings']), len(BINDINGS)) @@ -1525,41 +1426,6 @@ def test_set_iam_policy(self): self.assertEqual( sorted(found['members']), sorted(expected['members'])) - def test_set_iam_policy_w_user_project(self): - from google.cloud.iam import Policy - - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - ETAG = 'DEADBEEF' - VERSION = 17 - BINDINGS = [] - RETURNED = { - 'etag': ETAG, - 'version': VERSION, - 'bindings': BINDINGS, - } - after = ({'status': http_client.OK}, RETURNED) - policy = Policy() - - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) - - returned = blob.set_iam_policy(policy) - - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) - self.assertEqual(dict(returned), dict(policy)) - - kw = 
connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) - self.assertEqual(kw[0]['data'], {'resourceId': PATH}) - def test_test_iam_permissions(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET @@ -1590,39 +1456,6 @@ def test_test_iam_permissions(self): self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) self.assertEqual(kw[0]['query_params'], {'permissions': PERMISSIONS}) - def test_test_iam_permissions_w_user_project(self): - from google.cloud.storage.iam import STORAGE_OBJECTS_LIST - from google.cloud.storage.iam import STORAGE_BUCKETS_GET - from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - PERMISSIONS = [ - STORAGE_OBJECTS_LIST, - STORAGE_BUCKETS_GET, - STORAGE_BUCKETS_UPDATE, - ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {'permissions': ALLOWED} - after = ({'status': http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) - - allowed = blob.test_iam_permissions(PERMISSIONS) - - self.assertEqual(allowed, ALLOWED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) - self.assertEqual( - kw[0]['query_params'], - {'permissions': PERMISSIONS, 'userProject': USER_PROJECT}) - def test_make_public(self): from google.cloud.storage.acl import _ACLEntity @@ -1657,18 +1490,17 @@ def test_compose_wo_content_type_set(self): with self.assertRaises(ValueError): destination.compose(sources=[source_1, source_2]) - def test_compose_minimal_w_user_project(self): + def test_compose_minimal(self): SOURCE_1 = 'source-1' SOURCE_2 = 'source-2' DESTINATION = 'destinaton' RESOURCE = { 'etag': 'DEADBEEF' } - USER_PROJECT = 'user-project-123' after = ({'status': http_client.OK}, RESOURCE) connection = _Connection(after) client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) + bucket = _Bucket(client=client) source_1 = self._make_one(SOURCE_1, bucket=bucket) source_2 = self._make_one(SOURCE_2, bucket=bucket) destination = self._make_one(DESTINATION, bucket=bucket) @@ -1678,23 +1510,20 @@ def test_compose_minimal_w_user_project(self): self.assertEqual(destination.etag, 'DEADBEEF') + SENT = { + 'sourceObjects': [ + {'name': source_1.name}, + {'name': source_2.name}, + ], + 'destination': { + 'contentType': 'text/plain', + }, + } kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'POST', - 'path': '/b/name/o/%s/compose' % DESTINATION, - 'query_params': {'userProject': USER_PROJECT}, - 'data': { - 'sourceObjects': [ - {'name': source_1.name}, - {'name': source_2.name}, - ], - 'destination': { - 'contentType': 'text/plain', - }, - }, - '_target_object': destination, - }) + self.assertEqual(kw[0]['method'], 'POST') + self.assertEqual(kw[0]['path'], '/b/name/o/%s/compose' % DESTINATION) + self.assertEqual(kw[0]['data'], SENT) def test_compose_w_additional_property_changes(self): SOURCE_1 = 'source-1' @@ -1718,27 +1547,24 @@ def test_compose_w_additional_property_changes(self): self.assertEqual(destination.etag, 
'DEADBEEF') + SENT = { + 'sourceObjects': [ + {'name': source_1.name}, + {'name': source_2.name}, + ], + 'destination': { + 'contentType': 'text/plain', + 'contentLanguage': 'en-US', + 'metadata': { + 'my-key': 'my-value', + } + }, + } kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'POST', - 'path': '/b/name/o/%s/compose' % DESTINATION, - 'query_params': {}, - 'data': { - 'sourceObjects': [ - {'name': source_1.name}, - {'name': source_2.name}, - ], - 'destination': { - 'contentType': 'text/plain', - 'contentLanguage': 'en-US', - 'metadata': { - 'my-key': 'my-value', - } - }, - }, - '_target_object': destination, - }) + self.assertEqual(kw[0]['method'], 'POST') + self.assertEqual(kw[0]['path'], '/b/name/o/%s/compose' % DESTINATION) + self.assertEqual(kw[0]['data'], SENT) def test_rewrite_response_without_resource(self): SOURCE_BLOB = 'source' @@ -1810,7 +1636,7 @@ def test_rewrite_other_bucket_other_name_no_encryption_partial(self): self.assertNotIn('X-Goog-Encryption-Key', headers) self.assertNotIn('X-Goog-Encryption-Key-Sha256', headers) - def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): + def test_rewrite_same_name_no_old_key_new_key_done(self): import base64 import hashlib @@ -1819,7 +1645,6 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): KEY_HASH = hashlib.sha256(KEY).digest() KEY_HASH_B64 = base64.b64encode(KEY_HASH).rstrip().decode('ascii') BLOB_NAME = 'blob' - USER_PROJECT = 'user-project-123' RESPONSE = { 'totalBytesRewritten': 42, 'objectSize': 42, @@ -1829,7 +1654,7 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): response = ({'status': http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) + bucket = _Bucket(client=client) plain = self._make_one(BLOB_NAME, bucket=bucket) encrypted = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=KEY) @@ -1845,7 +1670,7 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): self.assertEqual(kw[0]['method'], 'POST') PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw[0]['query_params'], {}) SENT = {} self.assertEqual(kw[0]['data'], SENT) @@ -1948,7 +1773,7 @@ def test_update_storage_class_wo_encryption_key(self): self.assertEqual(kw[0]['method'], 'POST') PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {}) + self.assertNotIn('query_params', kw[0]) SENT = {'storageClass': STORAGE_CLASS} self.assertEqual(kw[0]['data'], SENT) @@ -1962,7 +1787,7 @@ def test_update_storage_class_wo_encryption_key(self): self.assertNotIn('X-Goog-Encryption-Key', headers) self.assertNotIn('X-Goog-Encryption-Key-Sha256', headers) - def test_update_storage_class_w_encryption_key_w_user_project(self): + def test_update_storage_class_w_encryption_key(self): import base64 import hashlib @@ -1973,14 +1798,13 @@ def test_update_storage_class_w_encryption_key_w_user_project(self): BLOB_KEY_HASH_B64 = base64.b64encode( BLOB_KEY_HASH).rstrip().decode('ascii') STORAGE_CLASS = u'NEARLINE' - USER_PROJECT = 'user-project-123' RESPONSE = { 'resource': {'storageClass': STORAGE_CLASS}, } response = ({'status': http_client.OK}, RESPONSE) connection = _Connection(response) client = 
_Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) + bucket = _Bucket(client=client) blob = self._make_one( BLOB_NAME, bucket=bucket, encryption_key=BLOB_KEY) @@ -1993,7 +1817,7 @@ def test_update_storage_class_w_encryption_key_w_user_project(self): self.assertEqual(kw[0]['method'], 'POST') PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) + self.assertNotIn('query_params', kw[0]) SENT = {'storageClass': STORAGE_CLASS} self.assertEqual(kw[0]['data'], SENT) @@ -2415,17 +2239,18 @@ def _helper(self, message, **kwargs): return exc_info def test_default(self): - message = u'Failure' + message = b'Failure' exc_info = self._helper(message) - self.assertEqual(exc_info.exception.message, message) + self.assertEqual(exc_info.exception.message, message.decode('utf-8')) self.assertEqual(exc_info.exception.errors, []) def test_with_error_info(self): - message = u'Eeek bad.' + message = b'Eeek bad.' error_info = 'http://test.invalid' exc_info = self._helper(message, error_info=error_info) - full_message = u'{} ({})'.format(message, error_info) + message_str = message.decode('utf-8') + full_message = u'{} ({})'.format(message_str, error_info) self.assertEqual(exc_info.exception.message, full_message) self.assertEqual(exc_info.exception.errors, []) @@ -2457,7 +2282,7 @@ def api_request(self, **kw): class _Bucket(object): - def __init__(self, client=None, name='name', user_project=None): + def __init__(self, client=None, name='name'): if client is None: connection = _Connection() client = _Client(connection) @@ -2467,7 +2292,6 @@ def __init__(self, client=None, name='name', user_project=None): self._deleted = [] self.name = name self.path = '/b/' + name - self.user_project = user_project def delete_blob(self, blob_name, client=None): del self._blobs[blob_name] diff --git a/storage/tests/unit/test_bucket.py b/storage/tests/unit/test_bucket.py index b6231fa2192a..0df94dc5db3d 100644 --- a/storage/tests/unit/test_bucket.py +++ b/storage/tests/unit/test_bucket.py @@ -33,21 +33,13 @@ class _SigningCredentials( class Test_Bucket(unittest.TestCase): - @staticmethod - def _get_target_class(): + def _make_one(self, client=None, name=None, properties=None): from google.cloud.storage.bucket import Bucket - return Bucket - def _make_one( - self, client=None, name=None, properties=None, user_project=None): if client is None: connection = _Connection() client = _Client(connection) - if user_project is None: - bucket = self._get_target_class()(client, name=name) - else: - bucket = self._get_target_class()( - client, name=name, user_project=user_project) + bucket = Bucket(client, name=name) bucket._properties = properties or {} return bucket @@ -61,21 +53,6 @@ def test_ctor(self): self.assertIs(bucket._acl.bucket, bucket) self.assertFalse(bucket._default_object_acl.loaded) self.assertIs(bucket._default_object_acl.bucket, bucket) - self.assertIsNone(bucket.user_project) - - def test_ctor_w_user_project(self): - NAME = 'name' - USER_PROJECT = 'user-project-123' - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(client, name=NAME, user_project=USER_PROJECT) - self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket._properties, {}) - self.assertEqual(bucket.user_project, USER_PROJECT) - self.assertFalse(bucket._acl.loaded) - self.assertIs(bucket._acl.bucket, bucket) - self.assertFalse(bucket._default_object_acl.loaded) - 
self.assertIs(bucket._default_object_acl.bucket, bucket) def test_blob(self): from google.cloud.storage.blob import Blob @@ -96,8 +73,9 @@ def test_blob(self): self.assertEqual(blob._encryption_key, KEY) def test_bucket_name_value(self): - BUCKET_NAME = 'bucket-name' - bucket = self._make_one(name=BUCKET_NAME) + bucket_name = 'testing123' + mixin = self._make_one(name=bucket_name) + self.assertEqual(mixin.name, bucket_name) bad_start_bucket_name = '/testing123' with self.assertRaises(ValueError): @@ -107,13 +85,6 @@ def test_bucket_name_value(self): with self.assertRaises(ValueError): self._make_one(name=bad_end_bucket_name) - def test_user_project(self): - BUCKET_NAME = 'name' - USER_PROJECT = 'user-project-123' - bucket = self._make_one(name=BUCKET_NAME) - bucket._user_project = USER_PROJECT - self.assertEqual(bucket.user_project, USER_PROJECT) - def test_exists_miss(self): from google.cloud.exceptions import NotFound @@ -141,9 +112,7 @@ def api_request(cls, *args, **kwargs): expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) - def test_exists_hit_w_user_project(self): - USER_PROJECT = 'user-project-123' - + def test_exists_hit(self): class _FakeConnection(object): _called_with = [] @@ -155,7 +124,7 @@ def api_request(cls, *args, **kwargs): return object() BUCKET_NAME = 'bucket-name' - bucket = self._make_one(name=BUCKET_NAME, user_project=USER_PROJECT) + bucket = self._make_one(name=BUCKET_NAME) client = _Client(_FakeConnection) self.assertTrue(bucket.exists(client=client)) expected_called_kwargs = { @@ -163,29 +132,17 @@ def api_request(cls, *args, **kwargs): 'path': bucket.path, 'query_params': { 'fields': 'name', - 'userProject': USER_PROJECT, }, '_target_object': None, } expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) - def test_create_w_user_project(self): - PROJECT = 'PROJECT' - BUCKET_NAME = 'bucket-name' - USER_PROJECT = 'user-project-123' - connection = _Connection() - client = _Client(connection, project=PROJECT) - bucket = self._make_one(client, BUCKET_NAME, user_project=USER_PROJECT) - - with self.assertRaises(ValueError): - bucket.create() - def test_create_hit(self): - PROJECT = 'PROJECT' BUCKET_NAME = 'bucket-name' DATA = {'name': BUCKET_NAME} connection = _Connection(DATA) + PROJECT = 'PROJECT' client = _Client(connection, project=PROJECT) bucket = self._make_one(client=client, name=BUCKET_NAME) bucket.create() @@ -219,7 +176,6 @@ def test_create_w_extra_properties(self): 'location': LOCATION, 'storageClass': STORAGE_CLASS, 'versioning': {'enabled': True}, - 'billing': {'requesterPays': True}, 'labels': LABELS, } connection = _Connection(DATA) @@ -230,7 +186,6 @@ def test_create_w_extra_properties(self): bucket.location = LOCATION bucket.storage_class = STORAGE_CLASS bucket.versioning_enabled = True - bucket.requester_pays = True bucket.labels = LABELS bucket.create() @@ -277,20 +232,41 @@ def test_get_blob_miss(self): self.assertEqual(kw['method'], 'GET') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) - def test_get_blob_hit_w_user_project(self): + def test_get_blob_hit(self): NAME = 'name' BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' connection = _Connection({'name': BLOB_NAME}) client = _Client(connection) - bucket = self._make_one(name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(name=NAME) blob = bucket.get_blob(BLOB_NAME, client=client) self.assertIs(blob.bucket, bucket) self.assertEqual(blob.name, BLOB_NAME) kw, 
= connection._requested self.assertEqual(kw['method'], 'GET') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw['query_params'], {'userProject': USER_PROJECT}) + + def test_get_blob_hit_with_kwargs(self): + from google.cloud.storage.blob import _get_encryption_headers + + NAME = 'name' + BLOB_NAME = 'blob-name' + CHUNK_SIZE = 1024 * 1024 + KEY = b'01234567890123456789012345678901' # 32 bytes + + connection = _Connection({'name': BLOB_NAME}) + client = _Client(connection) + bucket = self._make_one(name=NAME) + blob = bucket.get_blob( + BLOB_NAME, client=client, encryption_key=KEY, chunk_size=CHUNK_SIZE + ) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, BLOB_NAME) + kw, = connection._requested + self.assertEqual(kw['method'], 'GET') + self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) + self.assertEqual(kw['headers'], _get_encryption_headers(KEY)) + self.assertEqual(blob.chunk_size, CHUNK_SIZE) + self.assertEqual(blob._encryption_key, KEY) def test_list_blobs_defaults(self): NAME = 'name' @@ -305,9 +281,8 @@ def test_list_blobs_defaults(self): self.assertEqual(kw['path'], '/b/%s/o' % NAME) self.assertEqual(kw['query_params'], {'projection': 'noAcl'}) - def test_list_blobs_w_all_arguments_and_user_project(self): + def test_list_blobs_w_all_arguments(self): NAME = 'name' - USER_PROJECT = 'user-project-123' MAX_RESULTS = 10 PAGE_TOKEN = 'ABCD' PREFIX = 'subfolder' @@ -323,11 +298,10 @@ def test_list_blobs_w_all_arguments_and_user_project(self): 'versions': VERSIONS, 'projection': PROJECTION, 'fields': FIELDS, - 'userProject': USER_PROJECT, } connection = _Connection({'items': []}) client = _Client(connection) - bucket = self._make_one(name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(name=NAME) iterator = bucket.list_blobs( max_results=MAX_RESULTS, page_token=PAGE_TOKEN, @@ -369,27 +343,23 @@ def test_delete_miss(self): expected_cw = [{ 'method': 'DELETE', 'path': bucket.path, - 'query_params': {}, '_target_object': None, }] self.assertEqual(connection._deleted_buckets, expected_cw) - def test_delete_hit_with_user_project(self): + def test_delete_hit(self): NAME = 'name' - USER_PROJECT = 'user-project-123' GET_BLOBS_RESP = {'items': []} connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True client = _Client(connection) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME) result = bucket.delete(force=True) self.assertIsNone(result) expected_cw = [{ 'method': 'DELETE', 'path': bucket.path, '_target_object': None, - 'query_params': {'userProject': USER_PROJECT}, }] self.assertEqual(connection._deleted_buckets, expected_cw) @@ -414,7 +384,6 @@ def test_delete_force_delete_blobs(self): expected_cw = [{ 'method': 'DELETE', 'path': bucket.path, - 'query_params': {}, '_target_object': None, }] self.assertEqual(connection._deleted_buckets, expected_cw) @@ -433,7 +402,6 @@ def test_delete_force_miss_blobs(self): expected_cw = [{ 'method': 'DELETE', 'path': bucket.path, - 'query_params': {}, '_target_object': None, }] self.assertEqual(connection._deleted_buckets, expected_cw) @@ -470,22 +438,18 @@ def test_delete_blob_miss(self): kw, = connection._requested self.assertEqual(kw['method'], 'DELETE') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) - self.assertEqual(kw['query_params'], {}) - def test_delete_blob_hit_with_user_project(self): + def test_delete_blob_hit(self): NAME = 'name' BLOB_NAME = 'blob-name' - 
USER_PROJECT = 'user-project-123' connection = _Connection({}) client = _Client(connection) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME) result = bucket.delete_blob(BLOB_NAME) self.assertIsNone(result) kw, = connection._requested self.assertEqual(kw['method'], 'DELETE') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw['query_params'], {'userProject': USER_PROJECT}) def test_delete_blobs_empty(self): NAME = 'name' @@ -495,20 +459,17 @@ def test_delete_blobs_empty(self): bucket.delete_blobs([]) self.assertEqual(connection._requested, []) - def test_delete_blobs_hit_w_user_project(self): + def test_delete_blobs_hit(self): NAME = 'name' BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' connection = _Connection({}) client = _Client(connection) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME) bucket.delete_blobs([BLOB_NAME]) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'DELETE') self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) def test_delete_blobs_miss_no_on_error(self): from google.cloud.exceptions import NotFound @@ -566,7 +527,6 @@ class _Blob(object): DEST, BLOB_NAME) self.assertEqual(kw['method'], 'POST') self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {}) def test_copy_blobs_preserve_acl(self): from google.cloud.storage.acl import ObjectACL @@ -598,17 +558,14 @@ class _Blob(object): self.assertEqual(len(kw), 2) self.assertEqual(kw[0]['method'], 'POST') self.assertEqual(kw[0]['path'], COPY_PATH) - self.assertEqual(kw[0]['query_params'], {}) self.assertEqual(kw[1]['method'], 'PATCH') self.assertEqual(kw[1]['path'], NEW_BLOB_PATH) - self.assertEqual(kw[1]['query_params'], {'projection': 'full'}) - def test_copy_blobs_w_name_and_user_project(self): + def test_copy_blobs_w_name(self): SOURCE = 'source' DEST = 'dest' BLOB_NAME = 'blob-name' NEW_NAME = 'new_name' - USER_PROJECT = 'user-project-123' class _Blob(object): name = BLOB_NAME @@ -616,8 +573,7 @@ class _Blob(object): connection = _Connection({}) client = _Client(connection) - source = self._make_one( - client=client, name=SOURCE, user_project=USER_PROJECT) + source = self._make_one(client=client, name=SOURCE) dest = self._make_one(client=client, name=DEST) blob = _Blob() new_blob = source.copy_blob(blob, dest, NEW_NAME) @@ -628,7 +584,6 @@ class _Blob(object): DEST, NEW_NAME) self.assertEqual(kw['method'], 'POST') self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {'userProject': USER_PROJECT}) def test_rename_blob(self): BUCKET_NAME = 'BUCKET_NAME' @@ -934,24 +889,6 @@ def test_versioning_enabled_setter(self): bucket.versioning_enabled = True self.assertTrue(bucket.versioning_enabled) - def test_requester_pays_getter_missing(self): - NAME = 'name' - bucket = self._make_one(name=NAME) - self.assertEqual(bucket.requester_pays, False) - - def test_requester_pays_getter(self): - NAME = 'name' - before = {'billing': {'requesterPays': True}} - bucket = self._make_one(name=NAME, properties=before) - self.assertEqual(bucket.requester_pays, True) - - def test_requester_pays_setter(self): - NAME = 'name' - bucket = self._make_one(name=NAME) - self.assertFalse(bucket.requester_pays) - bucket.requester_pays = True - 
self.assertTrue(bucket.requester_pays) - def test_configure_website_defaults(self): NAME = 'name' UNSET = {'website': {'mainPageSuffix': None, @@ -1020,40 +957,6 @@ def test_get_iam_policy(self): self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {}) - - def test_get_iam_policy_w_user_project(self): - from google.cloud.iam import Policy - - NAME = 'name' - USER_PROJECT = 'user-project-123' - PATH = '/b/%s' % (NAME,) - ETAG = 'DEADBEEF' - VERSION = 17 - RETURNED = { - 'resourceId': PATH, - 'etag': ETAG, - 'version': VERSION, - 'bindings': [], - } - EXPECTED = {} - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) - - policy = bucket.get_iam_policy() - - self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED['etag']) - self.assertEqual(policy.version, RETURNED['version']) - self.assertEqual(dict(policy), EXPECTED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) def test_set_iam_policy(self): import operator @@ -1100,66 +1003,6 @@ def test_set_iam_policy(self): self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PUT') self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {}) - sent = kw[0]['data'] - self.assertEqual(sent['resourceId'], PATH) - self.assertEqual(len(sent['bindings']), len(BINDINGS)) - key = operator.itemgetter('role') - for found, expected in zip( - sorted(sent['bindings'], key=key), - sorted(BINDINGS, key=key)): - self.assertEqual(found['role'], expected['role']) - self.assertEqual( - sorted(found['members']), sorted(expected['members'])) - - def test_set_iam_policy_w_user_project(self): - import operator - from google.cloud.storage.iam import STORAGE_OWNER_ROLE - from google.cloud.storage.iam import STORAGE_EDITOR_ROLE - from google.cloud.storage.iam import STORAGE_VIEWER_ROLE - from google.cloud.iam import Policy - - NAME = 'name' - USER_PROJECT = 'user-project-123' - PATH = '/b/%s' % (NAME,) - ETAG = 'DEADBEEF' - VERSION = 17 - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - BINDINGS = [ - {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ] - RETURNED = { - 'etag': ETAG, - 'version': VERSION, - 'bindings': BINDINGS, - } - policy = Policy() - for binding in BINDINGS: - policy[binding['role']] = binding['members'] - - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) - - returned = bucket.set_iam_policy(policy) - - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) - self.assertEqual(dict(returned), dict(policy)) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) sent 
= kw[0]['data'] self.assertEqual(sent['resourceId'], PATH) self.assertEqual(len(sent['bindings']), len(BINDINGS)) @@ -1199,38 +1042,6 @@ def test_test_iam_permissions(self): self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) self.assertEqual(kw[0]['query_params'], {'permissions': PERMISSIONS}) - def test_test_iam_permissions_w_user_project(self): - from google.cloud.storage.iam import STORAGE_OBJECTS_LIST - from google.cloud.storage.iam import STORAGE_BUCKETS_GET - from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - - NAME = 'name' - USER_PROJECT = 'user-project-123' - PATH = '/b/%s' % (NAME,) - PERMISSIONS = [ - STORAGE_OBJECTS_LIST, - STORAGE_BUCKETS_GET, - STORAGE_BUCKETS_UPDATE, - ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {'permissions': ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) - - allowed = bucket.test_iam_permissions(PERMISSIONS) - - self.assertEqual(allowed, ALLOWED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) - self.assertEqual( - kw[0]['query_params'], - {'permissions': PERMISSIONS, 'userProject': USER_PROJECT}) - def test_make_public_defaults(self): from google.cloud.storage.acl import _ACLEntity diff --git a/storage/tests/unit/test_client.py b/storage/tests/unit/test_client.py index 29545415a220..9696d4e5fa51 100644 --- a/storage/tests/unit/test_client.py +++ b/storage/tests/unit/test_client.py @@ -155,22 +155,22 @@ def test_get_bucket_hit(self): CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' + BLOB_NAME = 'blob-name' URI = '/'.join([ client._connection.API_BASE_URL, 'storage', client._connection.API_VERSION, 'b', - '%s?projection=noAcl' % (BUCKET_NAME,), + '%s?projection=noAcl' % (BLOB_NAME,), ]) http = client._http_internal = _Http( {'status': '200', 'content-type': 'application/json'}, - '{{"name": "{0}"}}'.format(BUCKET_NAME).encode('utf-8'), + '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), ) - bucket = client.get_bucket(BUCKET_NAME) + bucket = client.get_bucket(BLOB_NAME) self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) + self.assertEqual(bucket.name, BLOB_NAME) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -203,34 +203,33 @@ def test_lookup_bucket_hit(self): CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' + BLOB_NAME = 'blob-name' URI = '/'.join([ client._connection.API_BASE_URL, 'storage', client._connection.API_VERSION, 'b', - '%s?projection=noAcl' % (BUCKET_NAME,), + '%s?projection=noAcl' % (BLOB_NAME,), ]) http = client._http_internal = _Http( {'status': '200', 'content-type': 'application/json'}, - '{{"name": "{0}"}}'.format(BUCKET_NAME).encode('utf-8'), + '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), ) - bucket = client.lookup_bucket(BUCKET_NAME) + bucket = client.lookup_bucket(BLOB_NAME) self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) + self.assertEqual(bucket.name, BLOB_NAME) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) def test_create_bucket_conflict(self): - import json from google.cloud.exceptions import Conflict PROJECT = 
'PROJECT' CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' + BLOB_NAME = 'blob-name' URI = '/'.join([ client._connection.API_BASE_URL, 'storage', @@ -242,21 +241,18 @@ def test_create_bucket_conflict(self): '{"error": {"message": "Conflict"}}', ) - self.assertRaises(Conflict, client.create_bucket, BUCKET_NAME) + self.assertRaises(Conflict, client.create_bucket, BLOB_NAME) self.assertEqual(http._called_with['method'], 'POST') self.assertEqual(http._called_with['uri'], URI) - body = json.loads(http._called_with['body']) - self.assertEqual(body, {'name': BUCKET_NAME}) def test_create_bucket_success(self): - import json from google.cloud.storage.bucket import Bucket PROJECT = 'PROJECT' CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' + BLOB_NAME = 'blob-name' URI = '/'.join([ client._connection.API_BASE_URL, 'storage', @@ -265,17 +261,14 @@ def test_create_bucket_success(self): ]) http = client._http_internal = _Http( {'status': '200', 'content-type': 'application/json'}, - '{{"name": "{0}"}}'.format(BUCKET_NAME).encode('utf-8'), + '{{"name": "{0}"}}'.format(BLOB_NAME).encode('utf-8'), ) - bucket = client.create_bucket(BUCKET_NAME, requester_pays=True) + bucket = client.create_bucket(BLOB_NAME) self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) + self.assertEqual(bucket.name, BLOB_NAME) self.assertEqual(http._called_with['method'], 'POST') self.assertEqual(http._called_with['uri'], URI) - body = json.loads(http._called_with['body']) - self.assertEqual( - body, {'name': BUCKET_NAME, 'billing': {'requesterPays': True}}) def test_list_buckets_empty(self): from six.moves.urllib.parse import parse_qs @@ -407,7 +400,7 @@ def test_page_non_empty_response(self): credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - blob_name = 'bucket-name' + blob_name = 'blob-name' response = {'items': [{'name': blob_name}]} def dummy_response(): diff --git a/test_utils/setup.py b/test_utils/setup.py index b913d6e0f1dd..179a25898982 100644 --- a/test_utils/setup.py +++ b/test_utils/setup.py @@ -25,7 +25,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -49,7 +49,6 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', 'google-auth >= 0.4.0', 'six', ] diff --git a/trace/google/cloud/trace/__init__.py b/trace/google/cloud/trace/__init__.py index 461d41be3b20..558d1302e596 100644 --- a/trace/google/cloud/trace/__init__.py +++ b/trace/google/cloud/trace/__init__.py @@ -13,8 +13,6 @@ # limitations under the License. from google.cloud.trace.client import Client -from google.cloud.trace.trace import Trace -from google.cloud.trace.trace_span import TraceSpan -__all__ = ['Client', 'Trace', 'TraceSpan'] +__all__ = ['Client'] diff --git a/trace/google/cloud/trace/__init__.py~HEAD b/trace/google/cloud/trace/__init__.py~HEAD new file mode 100644 index 000000000000..461d41be3b20 --- /dev/null +++ b/trace/google/cloud/trace/__init__.py~HEAD @@ -0,0 +1,20 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud.trace.client import Client
+from google.cloud.trace.trace import Trace
+from google.cloud.trace.trace_span import TraceSpan
+
+
+__all__ = ['Client', 'Trace', 'TraceSpan']
diff --git a/trace/google/cloud/trace/client.py b/trace/google/cloud/trace/client.py
index bc6e08cbda4d..7d49c47c6f5c 100644
--- a/trace/google/cloud/trace/client.py
+++ b/trace/google/cloud/trace/client.py
@@ -15,7 +15,10 @@
 """Client for interacting with the Stackdriver Trace API."""
 
 from google.cloud.trace._gax import make_gax_trace_api
+<<<<<<< HEAD
 from google.cloud.trace.trace import Trace
+=======
+>>>>>>> e273319dfff93228889024f6194da48616a20e21
 from google.cloud.client import ClientWithProject
 from google.cloud._helpers import _datetime_to_pb_timestamp
 
@@ -50,6 +53,7 @@ def trace_api(self):
         self._trace_api = make_gax_trace_api(self)
         return self._trace_api
 
+<<<<<<< HEAD
     def trace(self, project_id=None, trace_id=None):
         """Initialize a new trace instance.
 
@@ -68,6 +72,8 @@ def trace(self, project_id=None, trace_id=None):
 
         return Trace(client=self, project_id=project_id, trace_id=trace_id)
 
+=======
+>>>>>>> e273319dfff93228889024f6194da48616a20e21
     def patch_traces(self, traces, project_id=None, options=None):
         """Sends new traces to Stackdriver Trace or updates existing traces.
 
diff --git a/core/tests/unit/streaming/__init__.py b/trace/google/cloud/trace/propagation/__init__.py
similarity index 94%
rename from core/tests/unit/streaming/__init__.py
rename to trace/google/cloud/trace/propagation/__init__.py
index 58e0d9153632..7c07b241f066 100644
--- a/core/tests/unit/streaming/__init__.py
+++ b/trace/google/cloud/trace/propagation/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2016 Google Inc.
+# Copyright 2017 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/trace/google/cloud/trace/propagation/google_cloud_format.py b/trace/google/cloud/trace/propagation/google_cloud_format.py
new file mode 100644
index 000000000000..edb70d3f1e8b
--- /dev/null
+++ b/trace/google/cloud/trace/propagation/google_cloud_format.py
@@ -0,0 +1,92 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Convert a trace context header in the Google Cloud format into a
+SpanContext, or convert a SpanContext back into a Google Cloud format
+header. Support for other formats (binary, Zipkin, OpenCensus) may be
+added later.
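+
+For illustration, a fully populated header looks like
+'6e0c63257de34c92bf9efcd03927272e/12345;o=1' (hypothetical values): a
+32-character hex trace ID, an optional span ID after '/', and an optional
+tracing-enabled flag after ';o='. A round trip then behaves like this:
+
+    context = from_header('6e0c63257de34c92bf9efcd03927272e/12345;o=1')
+    # context.trace_id == '6e0c63257de34c92bf9efcd03927272e'
+    # context.span_id == 12345 (SpanContext coerces the string to int)
+    # context.enabled is True
+    to_header(context)  # '6e0c63257de34c92bf9efcd03927272e/12345;o=1'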
+"""
+
+import logging
+import re
+
+from google.cloud.trace.span_context import SpanContext
+
+_TRACE_CONTEXT_HEADER_FORMAT = '([0-9a-f]{32})(\/(\d+))?(;o=(\d+))?'
+_TRACE_ID_DELIMETER = '/'
+_SPAN_ID_DELIMETER = ';'
+
+
+def from_header(header):
+    """Generate a SpanContext object using the trace context header.
+    The enabled flag parsed from the header is a string ('0' or '1'),
+    so it must be converted to a bool.
+
+    :type header: str
+    :param header: Trace context header which was extracted from the HTTP
+                   request headers.
+
+    :rtype: :class:`~google.cloud.trace.span_context.SpanContext`
+    :returns: SpanContext generated from the trace context header.
+    """
+    pattern = re.compile(_TRACE_CONTEXT_HEADER_FORMAT)
+
+    try:
+        match = re.search(pattern, header)
+    except TypeError:
+        logging.warning(
+            'Header should be str, got {}. Cannot parse the header, '
+            'generate a new context instead.'.format(
+                header.__class__.__name__))
+        return SpanContext()
+
+    if match:
+        trace_id = match.group(1)
+        span_id = match.group(3)
+        enabled = match.group(5)
+
+        if enabled is None:
+            enabled = True
+
+        trace_context = SpanContext(
+            trace_id=trace_id,
+            span_id=span_id,
+            # Convert via int first: bool('0') would be True.
+            enabled=bool(int(enabled)),
+            from_header=True)
+        return trace_context
+    else:
+        logging.warning(
+            'Cannot parse the header {}, generate a new context instead.'
+            .format(header))
+        return SpanContext()
+
+
+def to_header(span_context):
+    """Convert a SpanContext object to header string.
+
+    :type span_context:
+        :class:`~google.cloud.trace.span_context.SpanContext`
+    :param span_context: SpanContext object.
+
+    :rtype: str
+    :returns: A trace context header string in google cloud format.
+    """
+    trace_id = span_context.trace_id
+    span_id = span_context.span_id
+    enabled = span_context.enabled
+
+    header = '{}/{};o={}'.format(
+        trace_id,
+        span_id,
+        # Emit the flag as 0/1 rather than False/True.
+        int(enabled))
+    return header
diff --git a/trace/google/cloud/trace/span_context.py b/trace/google/cloud/trace/span_context.py
new file mode 100644
index 000000000000..c964d12afcc9
--- /dev/null
+++ b/trace/google/cloud/trace/span_context.py
@@ -0,0 +1,146 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""SpanContext encapsulates the current context within the request's trace."""
+
+from google.cloud.trace.trace import generate_trace_id
+
+import logging
+import re
+
+_INVALID_TRACE_ID = '0' * 32
+_INVALID_SPAN_ID = 0
+_TRACE_HEADER_KEY = 'X_CLOUD_TRACE_CONTEXT'
+_TRACE_ID_FORMAT = '[0-9a-f]{32}?'
+
+
+class SpanContext(object):
+    """SpanContext includes 3 fields: traceId, spanId, and an enabled flag
+    which indicates whether or not the request is being traced. It contains
+    the current context to be propagated to the child spans.
+
+    :type trace_id: str
+    :param trace_id: (Optional) A 32-character hex string identifying the
+                     trace. If not given, one is generated automatically.
+
+    :type span_id: int
+    :param span_id: (Optional) Identifier for the span, unique within a trace.
+
+    :type enabled: bool
+    :param enabled: (Optional) Indicates whether the request is traced or not.
+
+    :type from_header: bool
+    :param from_header: (Optional) Indicates whether the trace context was
+                        generated from the request header.
+    """
+    def __init__(
+            self,
+            trace_id=None,
+            span_id=None,
+            enabled=True,
+            from_header=False):
+        if trace_id is None:
+            trace_id = generate_trace_id()
+
+        if enabled is None:
+            enabled = True
+
+        self.trace_id = self.check_trace_id(trace_id)
+        self.span_id = self.check_span_id(span_id)
+        self.enabled = enabled
+        self.from_header = from_header
+
+    def check_span_id(self, span_id):
+        """Check that span_id is an int. If it is not, try to convert it
+        to int; if the conversion fails, log a warning and set the span_id
+        to None.
+
+        :type span_id: int
+        :param span_id: Identifier for the span, unique within a trace.
+
+        :rtype: int
+        :returns: Span_id for the current span.
+        """
+        if span_id is None:
+            return None
+
+        if span_id == 0:
+            logging.warning(
+                'Span_id {} is invalid, cannot be zero.'.format(span_id))
+            self.from_header = False
+            return None
+
+        if not isinstance(span_id, int):
+            try:
+                span_id = int(span_id)
+            except (TypeError, ValueError):
+                logging.warning(
+                    'The type of span_id should be int, got {}.'.format(
+                        span_id.__class__.__name__))
+                self.from_header = False
+                span_id = None
+
+        return span_id
+
+    def check_trace_id(self, trace_id):
+        """Check that trace_id is a 32-character hex value representing a
+        128-bit number. The trace_id also cannot be all zeroes.
+
+        :type trace_id: str
+        :param trace_id: The trace ID to check.
+
+        :rtype: str
+        :returns: Trace_id for the current context.
+        """
+        # Compare by value; 'is' only checks object identity.
+        if trace_id == _INVALID_TRACE_ID:
+            logging.warning(
+                'Trace_id {} is invalid (cannot be all zero), '
+                'generate a new one.'.format(trace_id))
+            self.from_header = False
+            return generate_trace_id()
+
+        trace_id_pattern = re.compile(_TRACE_ID_FORMAT)
+
+        try:
+            match = trace_id_pattern.match(trace_id)
+
+            if match:
+                return trace_id
+            else:
+                logging.warning(
+                    'Trace_id {} does not match the required format, '
+                    'generate a new one instead.'.format(trace_id))
+                self.from_header = False
+                return generate_trace_id()
+
+        except TypeError:
+            logging.warning(
+                'Trace_id should be str, got {}. Generate a new one.'.format(
+                    trace_id.__class__.__name__))
+            self.from_header = False
+            return generate_trace_id()
+
+    def __str__(self):
+        """Returns a string form of the SpanContext. This is the format of
+        the Trace Context Header and should be forwarded to downstream
+        requests as the X-Cloud-Trace-Context header.
+
+        :rtype: str
+        :returns: String form of the SpanContext.
+        """
+        header = '{}/{};o={}'.format(
+            self.trace_id,
+            self.span_id,
+            # Emit the enabled flag as 0/1 to match the header format.
+            int(self.enabled))
+        return header
diff --git a/trace/google/cloud/trace/trace.py b/trace/google/cloud/trace/trace.py
index e726c7361f2f..c7d46f245dd5 100644
--- a/trace/google/cloud/trace/trace.py
+++ b/trace/google/cloud/trace/trace.py
@@ -26,6 +26,8 @@ class Trace(object):
     a single timed event within the operation. Note that Trace is not
     thread-safe and must not be shared between threads.
 
+    A trace contains one or more spans; initially the spans list is empty.
+
     See https://cloud.google.com/trace/docs/reference/v1/rpc/google.devtools.
cloudtrace.v1#google.devtools.cloudtrace.v1.Trace @@ -53,6 +55,7 @@ def __init__(self, client, project_id=None, trace_id=None): trace_id = generate_trace_id() self.trace_id = trace_id + self.spans = [] def __enter__(self): self.start() diff --git a/trace/google/cloud/trace/trace_span.py b/trace/google/cloud/trace/trace_span.py index 3ead9243d5cb..8e8116e5cac0 100644 --- a/trace/google/cloud/trace/trace_span.py +++ b/trace/google/cloud/trace/trace_span.py @@ -105,11 +105,11 @@ def span(self, name='child_span'): self._child_spans.append(child_span) return child_span - def set_start_time(self): + def start(self): """Set the start time for a span.""" self.start_time = datetime.utcnow().isoformat() + 'Z' - def set_end_time(self): + def end(self): """Set the end time for a span.""" self.end_time = datetime.utcnow().isoformat() + 'Z' @@ -119,11 +119,11 @@ def __iter__(self): yield self def __enter__(self): - self.set_start_time() + self.start() return self def __exit__(self, exception_type, exception_value, traceback): - self.set_end_time() + self.end() def generate_span_id(): diff --git a/trace/google/cloud/trace/tracer/context_tracer.py b/trace/google/cloud/trace/tracer/context_tracer.py new file mode 100644 index 000000000000..3e97f99c1d17 --- /dev/null +++ b/trace/google/cloud/trace/tracer/context_tracer.py @@ -0,0 +1,93 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.trace.span_context import SpanContext +from google.cloud.trace.trace import Trace +from google.cloud.trace.trace_span import TraceSpan + + +class ContextTracer(object): + """The interface for tracing a request context. + + :type client: :class:`~google.cloud.trace.client.Client` + :param client: The client that owns this API object. + + :type span_context: :class:`~google.cloud.trace.span_context.SpanContext` + :param span_context: The current span context. + """ + _span_stack = [] + + def __init__(self, client, span_context): + self.client = client + + if span_context is None: + span_context = SpanContext() + + self.span_context = span_context + self.trace_id = span_context.trace_id + self.trace = self.trace() + + def trace(self): + """Create a trace using the context information. + + :rtype: :class:`~google.cloud.trace.trace.Trace` + :returns: The Trace object. + """ + return Trace(client=self.client, trace_id=self.trace_id) + + def start_trace(self): + """Start a trace.""" + self.trace.start() + + def end_trace(self): + """End a trace.""" + self.trace.finish() + + def span(self, name='span'): + """Create a new span with the trace using the context information. + + :type name: str + :param name: The name of the span. + + :rtype: :class:`~google.cloud.trace.trace_span.TraceSpan` + :returns: The TraceSpan object. 
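+
+        Illustrative usage (assuming a ``client`` already exists; the
+        span name is hypothetical):
+
+            tracer = ContextTracer(client, SpanContext())
+            span = tracer.span(name='fetch-user')
+            # The new span is appended to tracer.trace.spans, pushed onto
+            # the span stack, and its span_id becomes the current span_id
+            # in tracer.span_context.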
+    """
+        parent_span_id = self.span_context.span_id
+        span = TraceSpan(name, parent_span_id=parent_span_id)
+        self.trace.spans.append(span)
+        self._span_stack.append(span)
+        self.span_context.span_id = span.span_id
+        return span
+
+    def start_span(self, name='span'):
+        """Start a span."""
+        span = self.span(name=name)
+        span.start()
+
+    def end_span(self):
+        """End a span. Remove the span from the span stack, and update the
+        span_id in the SpanContext to the ID of the span now at the top of
+        the stack.
+        """
+        # Popping an empty stack raises IndexError for the caller.
+        cur_span = self._span_stack.pop()
+        cur_span.end()
+
+        if not self._span_stack:
+            self.span_context.span_id = None
+        else:
+            # Restore the parent span's ID, not the span object itself.
+            self.span_context.span_id = self._span_stack[-1].span_id
diff --git a/trace/google/cloud/trace/tracer/flask_tracer.py b/trace/google/cloud/trace/tracer/flask_tracer.py
new file mode 100644
index 000000000000..1c7a4244d79a
--- /dev/null
+++ b/trace/google/cloud/trace/tracer/flask_tracer.py
@@ -0,0 +1,60 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""The FlaskTracer maintains the trace spans for a trace in a Flask
+application. It keeps a stack of the TraceSpans that are currently open,
+so the current context is known at any moment.
+"""
+
+try:
+    import flask
+except ImportError:  # pragma: NO COVER
+    flask = None
+
+from google.cloud.trace.propagation.google_cloud_format import from_header
+from google.cloud.trace.span_context import _TRACE_HEADER_KEY
+from google.cloud.trace.tracer.context_tracer import ContextTracer
+
+
+class FlaskTracer(ContextTracer):
+    """The Flask implementation of the ContextTracer interface.
+
+    :type client: :class:`~google.cloud.trace.client.Client`
+    :param client: The client that owns this API object.
+
+    :type span_context: :class:`~google.cloud.trace.span_context.SpanContext`
+    :param span_context: The current span context.
+    """
+    def __init__(self, client, span_context=None):
+        if span_context is None:
+            header = get_flask_header()
+            span_context = from_header(header)
+
+        super(FlaskTracer, self).__init__(
+            client=client,
+            span_context=span_context)
+
+
+def get_flask_header():
+    """Get the trace context header from the Flask request headers.
+
+    :rtype: str
+    :returns: Trace context header in HTTP request headers.
+    """
+    if flask is None or not flask.request:
+        return None
+
+    header = flask.request.headers.get(_TRACE_HEADER_KEY)
+
+    return header
diff --git a/trace/nox.py b/trace/nox.py
index d7376a745130..08c69cb85df5 100644
--- a/trace/nox.py
+++ b/trace/nox.py
@@ -52,7 +52,7 @@ def lint(session):
     Returns a failure if flake8 finds linting errors or sufficiently
     serious code quality issues.
     """
-    session.interpreter = 'python2.7'
+    session.interpreter = 'python3.6'
     session.install('flake8', *LOCAL_DEPS)
     session.install('.')
     session.run('flake8', 'google/cloud/trace')
@@ -73,7 +73,7 @@ def cover(session):
     This outputs the coverage report aggregating coverage from the unit
     test runs (not system test runs), and then erases coverage data.
""" - session.interpreter = 'python2.7' + session.interpreter = 'python3.6' session.install('coverage', 'pytest-cov') session.run('coverage', 'report', '--show-missing', '--fail-under=100') session.run('coverage', 'erase') diff --git a/trace/tests/__init__.py b/trace/tests/__init__.py index 0fe161d30fc3..7c07b241f066 100644 --- a/trace/tests/__init__.py +++ b/trace/tests/__init__.py @@ -10,4 +10,4 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. \ No newline at end of file +# limitations under the License. diff --git a/trace/tests/__init__.py~HEAD b/trace/tests/__init__.py~HEAD new file mode 100644 index 000000000000..0fe161d30fc3 --- /dev/null +++ b/trace/tests/__init__.py~HEAD @@ -0,0 +1,13 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/trace/tests/unit/test_client.py b/trace/tests/unit/test_client.py index 39193cbec69a..fb4889652708 100644 --- a/trace/tests/unit/test_client.py +++ b/trace/tests/unit/test_client.py @@ -62,6 +62,7 @@ def make_api(client_obj): self.assertIs(api, api_obj) self.assertEqual(clients, [client]) +<<<<<<< HEAD def test_trace_default(self): from google.cloud.trace.trace import Trace @@ -88,6 +89,8 @@ def test_trace_explicit(self): self.assertEqual(result_trace.project_id, self.project) self.assertEqual(result_trace.trace_id, trace_id) +======= +>>>>>>> e273319dfff93228889024f6194da48616a20e21 def test_patch_traces_default(self): from google.cloud.trace._gax import _TraceAPI diff --git a/trace/tests/unit/test_trace_span.py b/trace/tests/unit/test_trace_span.py index a56a2e3d8063..0c3978371e6b 100644 --- a/trace/tests/unit/test_trace_span.py +++ b/trace/tests/unit/test_trace_span.py @@ -116,20 +116,20 @@ def test_span(self): self.assertIsNone(result_child_span.start_time) self.assertIsNone(result_child_span.end_time) - def test_set_start_time(self): + def test_start(self): span_name = 'root_span' span = self._make_one(span_name) self.assertIsNone(span.start_time) - span.set_start_time() + span.start() self.assertIsNotNone(span.start_time) - def test_set_end_time(self): + def test_end(self): span_name = 'root_span' span = self._make_one(span_name) self.assertIsNone(span.end_time) - span.set_end_time() + span.end() self.assertIsNotNone(span.end_time) def test___iter__(self): diff --git a/translate/MANIFEST.in b/translate/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/translate/MANIFEST.in +++ b/translate/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/translate/README.rst b/translate/README.rst index a85374ff5298..18bc34002258 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud 
Translation - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/translate-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/translate/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -42,6 +42,6 @@ See the ``google-cloud-python`` API Translation `Documentation`_ to learn how to translate text using this library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-translate.svg - :target: https://pypi.python.org/pypi/google-cloud-translate + :target: https://pypi.org/project/google-cloud-translate/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-translate.svg - :target: https://pypi.python.org/pypi/google-cloud-translate + :target: https://pypi.org/project/google-cloud-translate/ diff --git a/translate/google/cloud/translate.py b/translate/google/cloud/translate.py new file mode 100644 index 000000000000..9a24ceebcd10 --- /dev/null +++ b/translate/google/cloud/translate.py @@ -0,0 +1,32 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Translation API wrapper.""" + + +from google.cloud.translate_v2 import __version__ +from google.cloud.translate_v2.client import Client + +# These constants are essentially deprecated; strings should be used instead. +# They are imported here for backwards compatibility. 
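+# (Both are plain strings: BASE is 'base' and NMT is 'nmt'; note that the
+# system test below now passes model='nmt' directly.)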
+from google.cloud.translate_v2.client import BASE +from google.cloud.translate_v2.client import NMT + + +__all__ = ( + '__version__', + 'BASE', + 'Client', + 'NMT', +) diff --git a/translate/google/cloud/translate/__init__.py b/translate/google/cloud/translate_v2/__init__.py similarity index 79% rename from translate/google/cloud/translate/__init__.py rename to translate/google/cloud/translate_v2/__init__.py index bf20faa86bdf..11b762101cf7 100644 --- a/translate/google/cloud/translate/__init__.py +++ b/translate/google/cloud/translate_v2/__init__.py @@ -18,9 +18,10 @@ from pkg_resources import get_distribution __version__ = get_distribution('google-cloud-translate').version -from google.cloud.translate.client import BASE -from google.cloud.translate.client import Client -from google.cloud.translate.client import NMT +from google.cloud.translate_v2.client import Client -__all__ = ['__version__', 'BASE', 'Client', 'NMT'] +__all__ = ( + '__version__', + 'Client', +) diff --git a/translate/google/cloud/translate/_http.py b/translate/google/cloud/translate_v2/_http.py similarity index 96% rename from translate/google/cloud/translate/_http.py rename to translate/google/cloud/translate_v2/_http.py index 0c404f2a4a3b..dedb17ec9e14 100644 --- a/translate/google/cloud/translate/_http.py +++ b/translate/google/cloud/translate_v2/_http.py @@ -16,7 +16,7 @@ from google.cloud import _http -from google.cloud.translate import __version__ +from google.cloud.translate_v2 import __version__ _CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) diff --git a/translate/google/cloud/translate/client.py b/translate/google/cloud/translate_v2/client.py similarity index 98% rename from translate/google/cloud/translate/client.py rename to translate/google/cloud/translate_v2/client.py index 9acd7d65cc47..d72993f0fffd 100644 --- a/translate/google/cloud/translate/client.py +++ b/translate/google/cloud/translate_v2/client.py @@ -20,7 +20,7 @@ from google.cloud._helpers import _to_bytes from google.cloud.client import Client as BaseClient -from google.cloud.translate._http import Connection +from google.cloud.translate_v2._http import Connection ENGLISH_ISO_639 = 'en' @@ -189,8 +189,8 @@ def translate(self, values, target_language=None, format_=None, in the query. :type model: str - :param model: (Optional) The model used to translate the text. The - only accepted values are :attr:`BASE` and :attr:`NMT`. + :param model: (Optional) The model used to translate the text, such + as ``'base'`` or ``'nmt'``. :rtype: str or list :returns: A list of dictionaries for each queried value. Each diff --git a/translate/nox.py b/translate/nox.py index 43fb3612de2a..a7baf305f3ec 100644 --- a/translate/nox.py +++ b/translate/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -49,11 +52,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. 
session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -66,21 +72,32 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/translate') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/translate/pylint.config.py b/translate/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/translate/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/translate/setup.py b/translate/setup.py index 38bf65477c35..12934c6b4e96 100644 --- a/translate/setup.py +++ b/translate/setup.py @@ -27,7 +27,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -51,12 +51,12 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', ] setup( name='google-cloud-translate', - version='0.24.0', + version='1.0.0', description='Python Client for Google Cloud Translation API', long_description=README, namespace_packages=[ diff --git a/translate/tests/system.py b/translate/tests/system.py index e4b971e238f0..7403ed3c0510 100644 --- a/translate/tests/system.py +++ b/translate/tests/system.py @@ -56,7 +56,7 @@ def test_translate(self): values = ['hvala ti', 'dankon', 'Me llamo Jeff', 'My name is Jeff'] translations = Config.CLIENT.translate( - values, target_language='de', model=translate.NMT) + values, target_language='de', model='nmt') self.assertEqual(len(values), len(translations)) self.assertEqual( diff --git a/translate/tests/unit/test__http.py b/translate/tests/unit/test__http.py index 1d7f7b4c6c18..2dc6b015d6de 100644 --- a/translate/tests/unit/test__http.py +++ b/translate/tests/unit/test__http.py @@ -21,7 +21,7 @@ class TestConnection(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.translate._http import Connection + from google.cloud.translate_v2._http import Connection return Connection @@ -57,7 +57,7 @@ def test_build_api_url_w_extra_query_params(self): def test_extra_headers(self): from google.cloud import _http as base_http - from google.cloud.translate import _http as MUT + from google.cloud.translate_v2 import _http as MUT http = mock.Mock(spec=['request']) response = mock.Mock(status=200, spec=['status']) diff --git a/translate/tests/unit/test_client.py b/translate/tests/unit/test_client.py index d2c26cec96c4..18c19c436e45 100644 --- a/translate/tests/unit/test_client.py +++ b/translate/tests/unit/test_client.py @@ -19,16 +19,15 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.translate.client import Client - + from google.cloud.translate import Client return Client def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): - from google.cloud.translate._http import Connection - from google.cloud.translate.client import ENGLISH_ISO_639 + from google.cloud.translate_v2._http import Connection + from google.cloud.translate_v2.client import ENGLISH_ISO_639 http = object() client = self._make_one(_http=http) @@ -38,7 +37,7 @@ def test_constructor(self): self.assertEqual(client.target_language, ENGLISH_ISO_639) def test_constructor_non_default(self): - from google.cloud.translate._http import Connection + from google.cloud.translate_v2._http import Connection http = object() target = 'es' @@ -49,7 +48,7 @@ def test_constructor_non_default(self): self.assertEqual(client.target_language, target) def test_get_languages(self): - from google.cloud.translate.client import ENGLISH_ISO_639 + from google.cloud.translate_v2.client import ENGLISH_ISO_639 client = self._make_one(_http=object()) supported 
= [ diff --git a/videointelligence/MANIFEST.in b/videointelligence/MANIFEST.in index 8f5e2b1a8b1b..4e71ce57bacf 100644 --- a/videointelligence/MANIFEST.in +++ b/videointelligence/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE requirements.txt recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ diff --git a/videointelligence/README.rst b/videointelligence/README.rst index d3741cd88fc1..fff70efacf7b 100644 --- a/videointelligence/README.rst +++ b/videointelligence/README.rst @@ -7,10 +7,6 @@ Python Client for Google Cloud Video Intelligence |pypi| |versions| -- `Documentation`_ - -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/video-intelligence-usage.html - Quick Start ----------- @@ -26,7 +22,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -39,3 +35,8 @@ learning models in an easy to use REST API. .. _Video Intelligence: https://cloud.google.com/videointelligence/ .. _Video Intelligence API docs: https://cloud.google.com/videointelligence/reference/rest/ + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-videointelligence.svg + :target: https://pypi.org/project/google-cloud-videointelligence/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-videointelligence.svg + :target: https://pypi.org/project/google-cloud-videointelligence/ diff --git a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/enums.py b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/enums.py index 13b9bd25f0b2..b0781914a3de 100644 --- a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/enums.py +++ b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/enums.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py index 733a95c3240a..5baae515c69c 100644 --- a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py +++ b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py @@ -225,7 +225,6 @@ def annotate_video(self, :exc:`google.gax.errors.GaxError` if the RPC is aborted. :exc:`ValueError` if the parameters are invalid. """ - # Create the request object. 
request = video_intelligence_pb2.AnnotateVideoRequest( input_uri=input_uri, features=features, diff --git a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json index 7dd61bbb7b5d..996b2ab5e30b 100644 --- a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json +++ b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json @@ -6,9 +6,7 @@ "DEADLINE_EXCEEDED", "UNAVAILABLE" ], - "non_idempotent": [ - "UNAVAILABLE" - ] + "non_idempotent": [] }, "retry_params": { "default": { diff --git a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py index 9046090f92d7..7eb2e71d7e05 100644 --- a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py +++ b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py @@ -884,6 +884,47 @@ AnnotateVideoRequest = _reflection.GeneratedProtocolMessageType('AnnotateVideoRequest', (_message.Message,), dict( DESCRIPTOR = _ANNOTATEVIDEOREQUEST, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video annotation request. + + + Attributes: + input_uri: + Input video location. Currently, only `Google Cloud Storage + <https://cloud.google.com/storage/>`__ URIs are supported, + which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return [google + .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT + ]). For more information, see `Request URIs + </storage/docs/reference-uris>`__. A video URI may include + wildcards in ``object-id``, and thus identify multiple videos. + Supported wildcards: '\*' to match 0 or more characters; '?' + to match 1 character. If unset, the input video should be + embedded in the request as ``input_content``. If set, + ``input_content`` should be unset. + input_content: + The video data bytes. Encoding: base64. If unset, the input + video(s) should be specified via ``input_uri``. If set, + ``input_uri`` should be unset. + features: + Requested video annotation features. + video_context: + Additional video context and/or feature-specific parameters. + output_uri: + Optional location where the output (in JSON format) should be + stored. Currently, only `Google Cloud Storage + <https://cloud.google.com/storage/>`__ URIs are supported, + which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return [google + .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT + ]). For more information, see `Request URIs + </storage/docs/reference-uris>`__. + location_id: + Optional cloud region where annotation should take place. + Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- + west1``, ``asia-east1``. If no region is specified, a region + will be determined based on video file location. 
+ """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoRequest) )) _sym_db.RegisterMessage(AnnotateVideoRequest) @@ -891,6 +932,36 @@ VideoContext = _reflection.GeneratedProtocolMessageType('VideoContext', (_message.Message,), dict( DESCRIPTOR = _VIDEOCONTEXT, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video context and/or feature-specific parameters. + + + Attributes: + segments: + Video segments to annotate. The segments may overlap and are + not required to be contiguous or span the whole video. If + unspecified, each video is treated as a single segment. + label_detection_mode: + If label detection has been requested, what labels should be + detected in addition to video-level labels or segment-level + labels. If unspecified, defaults to ``SHOT_MODE``. + stationary_camera: + Whether the video has been shot from a stationary (i.e. non- + moving) camera. When set to true, might improve detection + accuracy for moving objects. + label_detection_model: + Model to use for label detection. Supported values: "latest" + and "stable" (the default). + face_detection_model: + Model to use for face detection. Supported values: "latest" + and "stable" (the default). + shot_change_detection_model: + Model to use for shot change detection. Supported values: + "latest" and "stable" (the default). + safe_search_detection_model: + Model to use for safe search detection. Supported values: + "latest" and "stable" (the default). + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoContext) )) _sym_db.RegisterMessage(VideoContext) @@ -898,6 +969,16 @@ VideoSegment = _reflection.GeneratedProtocolMessageType('VideoSegment', (_message.Message,), dict( DESCRIPTOR = _VIDEOSEGMENT, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video segment. + + + Attributes: + start_time_offset: + Start offset in microseconds (inclusive). Unset means 0. + end_time_offset: + End offset in microseconds (inclusive). Unset means 0. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoSegment) )) _sym_db.RegisterMessage(VideoSegment) @@ -905,6 +986,21 @@ LabelLocation = _reflection.GeneratedProtocolMessageType('LabelLocation', (_message.Message,), dict( DESCRIPTOR = _LABELLOCATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Label location. + + + Attributes: + segment: + Video segment. Set to [-1, -1] for video-level labels. Set to + [timestamp, timestamp] for frame-level labels. Otherwise, + corresponds to one of ``AnnotateSpec.segments`` (if specified) + or to shot boundaries (if requested). + confidence: + Confidence that the label is accurate. Range: [0, 1]. + level: + Label level. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelLocation) )) _sym_db.RegisterMessage(LabelLocation) @@ -912,6 +1008,18 @@ LabelAnnotation = _reflection.GeneratedProtocolMessageType('LabelAnnotation', (_message.Message,), dict( DESCRIPTOR = _LABELANNOTATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Label annotation. + + + Attributes: + description: + Textual description, e.g. ``Fixed-gear bicycle``. + language_code: + Language code for ``description`` in BCP-47 format. + locations: + Where the label was detected and with what confidence. 
+ """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelAnnotation) )) _sym_db.RegisterMessage(LabelAnnotation) @@ -919,6 +1027,29 @@ SafeSearchAnnotation = _reflection.GeneratedProtocolMessageType('SafeSearchAnnotation', (_message.Message,), dict( DESCRIPTOR = _SAFESEARCHANNOTATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Safe search annotation (based on per-frame visual signals only). If no + unsafe content has been detected in a frame, no annotations are present + for that frame. If only some types of unsafe content have been detected + in a frame, the likelihood is set to ``UNKNOWN`` for all other types of + unsafe content. + + + Attributes: + adult: + Likelihood of adult content. + spoof: + Likelihood that an obvious modification was made to the + original version to make it appear funny or offensive. + medical: + Likelihood of medical content. + violent: + Likelihood of violent content. + racy: + Likelihood of racy content. + time_offset: + Video time offset in microseconds. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.SafeSearchAnnotation) )) _sym_db.RegisterMessage(SafeSearchAnnotation) @@ -926,6 +1057,20 @@ BoundingBox = _reflection.GeneratedProtocolMessageType('BoundingBox', (_message.Message,), dict( DESCRIPTOR = _BOUNDINGBOX, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Bounding box. + + + Attributes: + left: + Left X coordinate. + right: + Right X coordinate. + bottom: + Bottom Y coordinate. + top: + Top Y coordinate. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.BoundingBox) )) _sym_db.RegisterMessage(BoundingBox) @@ -933,6 +1078,16 @@ FaceLocation = _reflection.GeneratedProtocolMessageType('FaceLocation', (_message.Message,), dict( DESCRIPTOR = _FACELOCATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Face location. + + + Attributes: + bounding_box: + Bounding box in a frame. + time_offset: + Video time offset in microseconds. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceLocation) )) _sym_db.RegisterMessage(FaceLocation) @@ -940,6 +1095,21 @@ FaceAnnotation = _reflection.GeneratedProtocolMessageType('FaceAnnotation', (_message.Message,), dict( DESCRIPTOR = _FACEANNOTATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Face annotation. + + + Attributes: + thumbnail: + Thumbnail of a representative face view (in JPEG format). + Encoding: base64. + segments: + All locations where a face was detected. Faces are detected + and tracked on a per-video basis (as opposed to across + multiple videos). + locations: + Face locations at one frame per second. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceAnnotation) )) _sym_db.RegisterMessage(FaceAnnotation) @@ -947,6 +1117,29 @@ VideoAnnotationResults = _reflection.GeneratedProtocolMessageType('VideoAnnotationResults', (_message.Message,), dict( DESCRIPTOR = _VIDEOANNOTATIONRESULTS, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Annotation results for a single video. + + + Attributes: + input_uri: + Video file location in `Google Cloud Storage + <https://cloud.google.com/storage/>`__. + label_annotations: + Label annotations. 
There is exactly one element for each + unique label. + face_annotations: + Face annotations. There is exactly one element for each unique + face. + shot_annotations: + Shot annotations. Each shot is represented as a video segment. + safe_search_annotations: + Safe search annotations. + error: + If set, indicates an error. Note that for a single + ``AnnotateVideoRequest`` some videos may succeed and some may + fail. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationResults) )) _sym_db.RegisterMessage(VideoAnnotationResults) @@ -954,6 +1147,17 @@ AnnotateVideoResponse = _reflection.GeneratedProtocolMessageType('AnnotateVideoResponse', (_message.Message,), dict( DESCRIPTOR = _ANNOTATEVIDEORESPONSE, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + + Attributes: + annotation_results: + Annotation results for all videos specified in + ``AnnotateVideoRequest``. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoResponse) )) _sym_db.RegisterMessage(AnnotateVideoResponse) @@ -961,6 +1165,22 @@ VideoAnnotationProgress = _reflection.GeneratedProtocolMessageType('VideoAnnotationProgress', (_message.Message,), dict( DESCRIPTOR = _VIDEOANNOTATIONPROGRESS, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Annotation progress for a single video. + + + Attributes: + input_uri: + Video file location in `Google Cloud Storage + <https://cloud.google.com/storage/>`__. + progress_percent: + Approximate percentage processed thus far. Guaranteed to be + 100 when fully processed. + start_time: + Time when the request was received. + update_time: + Time of the most recent update. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationProgress) )) _sym_db.RegisterMessage(VideoAnnotationProgress) @@ -968,6 +1188,17 @@ AnnotateVideoProgress = _reflection.GeneratedProtocolMessageType('AnnotateVideoProgress', (_message.Message,), dict( DESCRIPTOR = _ANNOTATEVIDEOPROGRESS, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video annotation progress. Included in the ``metadata`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + + Attributes: + annotation_progress: + Progress metadata for all videos specified in + ``AnnotateVideoRequest``. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoProgress) )) _sym_db.RegisterMessage(AnnotateVideoProgress) @@ -979,10 +1210,10 @@ # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. 
import grpc - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities from grpc.beta import implementations as beta_implementations from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities class VideoIntelligenceServiceStub(object): diff --git a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py index 4ea0e1df20f5..ca09db976c12 100644 --- a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py +++ b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py @@ -1,7 +1,5 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from grpc.framework.common import cardinality -from grpc.framework.interfaces.face import utilities as face_utilities import google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2 as google_dot_cloud_dot_proto_dot_videointelligence_dot_v1beta1_dot_video__intelligence__pb2 import google.longrunning.operations_pb2 as google_dot_longrunning_dot_operations__pb2 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/types.py b/videointelligence/google/cloud/videointelligence_v1beta1/types.py index 9ac3b8a6b2a5..bfc99c3ab24b 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/types.py +++ b/videointelligence/google/cloud/videointelligence_v1beta1/types.py @@ -21,6 +21,7 @@ names = [] for name, message in get_messages(video_intelligence_pb2).items(): + message.__module__ = 'google.cloud.videointelligence_v1beta1.types' setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/videointelligence/nox.py b/videointelligence/nox.py index 0f6bd713afbe..a76156e43a41 100644 --- a/videointelligence/nox.py +++ b/videointelligence/nox.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,6 +25,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov') session.install('-e', '.') @@ -36,6 +39,10 @@ def unit_tests(session, python_version): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/videointelligence/pylint.config.py b/videointelligence/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/videointelligence/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/videointelligence/setup.py b/videointelligence/setup.py index 9325a8ffb09a..a47f897e3855 100644 --- a/videointelligence/setup.py +++ b/videointelligence/setup.py @@ -27,7 +27,7 @@ setup( author='Google Cloud Platform', - author_email='googleapis-packages@google.com', + author_email='googleapis-publisher@google.com', name='google-cloud-videointelligence', version='0.25.0', description='Python Client for Google Cloud Video Intelligence', diff --git a/vision/README.rst b/vision/README.rst index f8b5adc2a0b9..d54f36c3e8c8 100644 --- a/vision/README.rst +++ b/vision/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Vision - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/vision-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/vision/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -50,6 +50,6 @@ See the ``google-cloud-python`` API `Documentation`_ to learn how to analyze images using this library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-vision.svg - :target: https://pypi.python.org/pypi/google-cloud-vision + :target: https://pypi.org/project/google-cloud-vision/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-vision.svg - :target: https://pypi.python.org/pypi/google-cloud-vision + :target: https://pypi.org/project/google-cloud-vision/ diff --git a/vision/google/cloud/gapic/vision/v1/enums.py b/vision/google/cloud/gapic/vision/v1/enums.py index 80eea7a1729e..1951f8c57df1 100644 --- a/vision/google/cloud/gapic/vision/v1/enums.py +++ b/vision/google/cloud/gapic/vision/v1/enums.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/vision/google/cloud/vision/__init__.py b/vision/google/cloud/vision/__init__.py index 9867337983cf..f446c00ca578 100644 --- a/vision/google/cloud/vision/__init__.py +++ b/vision/google/cloud/vision/__init__.py @@ -34,7 +34,7 @@ __version__ = get_distribution('google-cloud-vision').version from google.cloud.vision.client import Client -from google.cloud.vision_v1 import * +from google.cloud.vision_v1 import * # noqa __all__ = ( diff --git a/vision/google/cloud/vision/decorators.py b/vision/google/cloud/vision/decorators.py index ae280c579e5e..3f44664de231 100644 --- a/vision/google/cloud/vision/decorators.py +++ b/vision/google/cloud/vision/decorators.py @@ -13,8 +13,6 @@ # limitations under the License. from __future__ import absolute_import -import functools -import sys def add_single_feature_methods(cls): @@ -87,6 +85,8 @@ def _create_single_feature_method(feature, enum): image (:class:`~.{module}.types.Image`): The image to analyze. options (:class:`google.gax.CallOptions`): Overrides the default settings for this call, e.g, timeout, retries, etc. + kwargs (dict): Additional properties to be set on the + :class:`~.{module}.types.AnnotateImageRequest`. Returns: :class:`~.{module}.types.AnnotateImageResponse`: The API response. @@ -96,16 +96,17 @@ def _create_single_feature_method(feature, enum): feature_value = {'type': enum.__dict__[feature]} # Define the function to be returned. - def inner(self, image, options=None): + def inner(self, image, options=None, **kwargs): """Return a single feature annotation for the given image. Intended for use with functools.partial, to create the particular single-feature methods. """ - request = { - 'image': image, - 'features': [feature_value], - } + request = dict( + image=image, + features=[feature_value], + **kwargs + ) return self.annotate_image(request, options=options) # Set the appropriate function metadata. diff --git a/vision/google/cloud/vision/helpers.py b/vision/google/cloud/vision/helpers.py index 0ce8b0311b22..d85f8fab04f8 100644 --- a/vision/google/cloud/vision/helpers.py +++ b/vision/google/cloud/vision/helpers.py @@ -13,9 +13,7 @@ # limitations under the License. from __future__ import absolute_import -import collections import io -import six from google.gax.utils import protobuf diff --git a/vision/nox.py b/vision/nox.py index 984adfe7db00..a030b7a9e5bb 100644 --- a/vision/nox.py +++ b/vision/nox.py @@ -27,6 +27,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', '../core/') session.install('-e', '.') @@ -46,11 +49,14 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. 
+ session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package in-place. session.install('pytest', '../core/', '../storage/') session.install('../test_utils/') @@ -67,7 +73,7 @@ def system_tests_manual_layer(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) @@ -83,21 +89,32 @@ def system_tests_manual_layer(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', 'pylint', 'gcp-devrel-py-tools') session.install('.') - session.run('flake8', 'google/cloud/vision.py') + session.run('flake8', 'google/cloud/vision') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') diff --git a/vision/pylint.config.py b/vision/pylint.config.py new file mode 100644 index 000000000000..b618319b8b61 --- /dev/null +++ b/vision/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) diff --git a/vision/setup.py b/vision/setup.py index 6860e23fbaec..ad485c0e8642 100644 --- a/vision/setup.py +++ b/vision/setup.py @@ -25,8 +25,8 @@ readme = readme_file.read() REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', - 'google-gax >= 0.15.7, < 0.16dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-gax >= 0.15.13, < 0.16dev', 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', ] EXTRAS_REQUIRE = { @@ -35,9 +35,9 @@ setup( author='Google Cloud Platform', - author_email='googleapis-packages@google.com', + author_email='googleapis-publisher@google.com', name='google-cloud-vision', - version='0.25.0', + version='0.25.1', description='Python Client for Google Cloud Vision', long_description=readme, namespace_packages=[ diff --git a/vision/tests/unit/test_decorators.py b/vision/tests/unit/test_decorators.py index 8ef86b71ec61..f0841e8ecd21 100644 --- a/vision/tests/unit/test_decorators.py +++ b/vision/tests/unit/test_decorators.py @@ -55,15 +55,21 @@ class SingleFeatureMethodTests(unittest.TestCase): def test_runs_generic_single_image(self, ai): ai.return_value = vision.types.AnnotateImageResponse() + # Prove that other aspects of the AnnotateImageRequest, such as the + # image context, will be preserved. + SENTINEL = object() + # Make a face detection request. client = vision.ImageAnnotatorClient( credentials=mock.Mock(spec=Credentials), ) image = {'source': {'image_uri': 'gs://my-test-bucket/image.jpg'}} - response = client.face_detection(image) + response = client.face_detection(image, image_context=SENTINEL) + assert isinstance(response, vision.types.AnnotateImageResponse) # Assert that the single-image method was called as expected. ai.assert_called_once_with({ 'features': [{'type': vision.enums.Feature.Type.FACE_DETECTION}], 'image': image, + 'image_context': SENTINEL, }, options=None)
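For reference, here are two minimal usage sketches of the behavior exercised by the updated tests. They are illustrative only: they assume the packages from this changeset are installed (``google-cloud-translate`` 1.0.0 and ``google-cloud-vision`` 0.25.1), that ``GOOGLE_APPLICATION_CREDENTIALS`` points at a valid service account, and that the ``gs://`` URI below is a placeholder for a bucket you control.

With the ``translate_v2`` layout, the translation model is selected with a plain string (the top-level ``BASE``/``NMT`` constants survive only as compatibility re-exports)::

    from google.cloud import translate

    client = translate.Client()

    # The model is now passed as a plain string such as 'base' or 'nmt',
    # matching the updated system test above. A single input value
    # returns a single result dictionary.
    result = client.translate(
        'Me llamo Jeff', target_language='de', model='nmt')
    print(result['translatedText'])

The ``test_decorators.py`` change verifies that extra keyword arguments now flow through the generated single-feature methods into the underlying ``AnnotateImageRequest``, so request-level fields such as ``image_context`` are no longer dropped::

    from google.cloud import vision

    client = vision.ImageAnnotatorClient()

    # Any image reference accepted by annotate_image works here; this
    # bucket/object pair is a placeholder.
    image = {'source': {'image_uri': 'gs://my-test-bucket/image.jpg'}}

    # Keyword arguments beyond `image` and `options` are merged into the
    # AnnotateImageRequest built by the single-feature helper.
    response = client.face_detection(
        image,
        image_context={'lat_long_rect': {
            'min_lat_lng': {'latitude': 37.42, 'longitude': -122.09},
            'max_lat_lng': {'latitude': 37.43, 'longitude': -122.08},
        }},
    )
    for face in response.face_annotations:
        print(face.detection_confidence)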