From dd2a8c12b89e0d8fa8931fdd412a358ff51409d2 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Thu, 22 Sep 2016 15:48:51 -0700
Subject: [PATCH 001/468] Moving core into designated package.

Only files have been moved for now. Will be made into a proper
package shortly. Done via:

$ mkdir -p core/google/cloud/streaming
$ cp google/__init__.py core/google/__init__.py
$ git add core/google/__init__.py
$ cp google/cloud/__init__.py core/google/cloud/__init__.py
$ git add core/google/cloud/__init__.py
$ git mv google/cloud/_helpers.py core/google/cloud/_helpers.py
$ git mv google/cloud/_testing.py core/google/cloud/_testing.py
$ git mv google/cloud/client.py core/google/cloud/client.py
$ git mv google/cloud/connection.py core/google/cloud/connection.py
$ git mv google/cloud/credentials.py core/google/cloud/credentials.py
$ git mv google/cloud/environment_vars.py core/google/cloud/environment_vars.py
$ git mv google/cloud/exceptions.py core/google/cloud/exceptions.py
$ git mv google/cloud/iterator.py core/google/cloud/iterator.py
$ git mv google/cloud/operation.py core/google/cloud/operation.py
$ git mv google/cloud/streaming/__init__.py core/google/cloud/streaming/__init__.py
$ git mv google/cloud/streaming/buffered_stream.py core/google/cloud/streaming/buffered_stream.py
$ git mv google/cloud/streaming/exceptions.py core/google/cloud/streaming/exceptions.py
$ git mv google/cloud/streaming/http_wrapper.py core/google/cloud/streaming/http_wrapper.py
$ git mv google/cloud/streaming/stream_slice.py core/google/cloud/streaming/stream_slice.py
$ git mv google/cloud/streaming/transfer.py core/google/cloud/streaming/transfer.py
$ git mv google/cloud/streaming/util.py core/google/cloud/streaming/util.py
---
 packages/google-cloud-core/google/__init__.py | 20 +
 .../google/cloud/__init__.py | 20 +
 .../google/cloud/_helpers.py | 690 ++++++++++
 .../google/cloud/_testing.py | 85 ++
 .../google-cloud-core/google/cloud/client.py | 186 +++
 .../google/cloud/connection.py | 361 +++++
 .../google/cloud/credentials.py | 251 ++++
 .../google/cloud/environment_vars.py | 44 +
 .../google/cloud/exceptions.py | 244 ++++
 .../google/cloud/iterator.py | 184 +++
 .../google/cloud/operation.py | 137 ++
 .../google/cloud/streaming/__init__.py | 17 +
 .../google/cloud/streaming/buffered_stream.py | 104 ++
 .../google/cloud/streaming/exceptions.py | 122 ++
 .../google/cloud/streaming/http_wrapper.py | 395 ++++++
 .../google/cloud/streaming/stream_slice.py | 85 ++
 .../google/cloud/streaming/transfer.py | 1214 +++++++++++++++++
 .../google/cloud/streaming/util.py | 74 +
 18 files changed, 4233 insertions(+)
 create mode 100644 packages/google-cloud-core/google/__init__.py
 create mode 100644 packages/google-cloud-core/google/cloud/__init__.py
 create mode 100644 packages/google-cloud-core/google/cloud/_helpers.py
 create mode 100644 packages/google-cloud-core/google/cloud/_testing.py
 create mode 100644 packages/google-cloud-core/google/cloud/client.py
 create mode 100644 packages/google-cloud-core/google/cloud/connection.py
 create mode 100644 packages/google-cloud-core/google/cloud/credentials.py
 create mode 100644 packages/google-cloud-core/google/cloud/environment_vars.py
 create mode 100644 packages/google-cloud-core/google/cloud/exceptions.py
 create mode 100644 packages/google-cloud-core/google/cloud/iterator.py
 create mode 100644 packages/google-cloud-core/google/cloud/operation.py
 create mode 100644 packages/google-cloud-core/google/cloud/streaming/__init__.py
 create mode 100644
packages/google-cloud-core/google/cloud/streaming/buffered_stream.py create mode 100644 packages/google-cloud-core/google/cloud/streaming/exceptions.py create mode 100644 packages/google-cloud-core/google/cloud/streaming/http_wrapper.py create mode 100644 packages/google-cloud-core/google/cloud/streaming/stream_slice.py create mode 100644 packages/google-cloud-core/google/cloud/streaming/transfer.py create mode 100644 packages/google-cloud-core/google/cloud/streaming/util.py diff --git a/packages/google-cloud-core/google/__init__.py b/packages/google-cloud-core/google/__init__.py new file mode 100644 index 000000000000..b2b833373882 --- /dev/null +++ b/packages/google-cloud-core/google/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-core/google/cloud/__init__.py b/packages/google-cloud-core/google/cloud/__init__.py new file mode 100644 index 000000000000..8ac7b74af136 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py new file mode 100644 index 000000000000..6e2cdc31ec96 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -0,0 +1,690 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared helpers for Google Cloud packages. + +This module is not part of the public API surface. 
+""" + +import calendar +import datetime +import json +import os +import re +import socket +from threading import local as Local + +from google.protobuf import timestamp_pb2 +try: + from google.appengine.api import app_identity +except ImportError: + app_identity = None +try: + import grpc +except ImportError: # pragma: NO COVER + grpc = None +import six +from six.moves import http_client +from six.moves import configparser + +# pylint: disable=ungrouped-imports +from google.cloud.environment_vars import PROJECT +from google.cloud.environment_vars import CREDENTIALS +# pylint: enable=ungrouped-imports + + +_NOW = datetime.datetime.utcnow # To be replaced by tests. +_RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' +_RFC3339_NO_FRACTION = '%Y-%m-%dT%H:%M:%S' +# datetime.strptime cannot handle nanosecond precision: parse w/ regex +_RFC3339_NANOS = re.compile(r""" + (?P + \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS + ) + \. # decimal point + (?P\d{1,9}) # nanoseconds, maybe truncated + Z # Zulu +""", re.VERBOSE) +# NOTE: Catching this ImportError is a workaround for GAE not supporting the +# "pwd" module which is imported lazily when "expanduser" is called. +try: + _USER_ROOT = os.path.expanduser('~') +except ImportError: # pragma: NO COVER + _USER_ROOT = None +_GCLOUD_CONFIG_FILE = os.path.join( + 'gcloud', 'configurations', 'config_default') +_GCLOUD_CONFIG_SECTION = 'core' +_GCLOUD_CONFIG_KEY = 'project' + + +class _LocalStack(Local): + """Manage a thread-local LIFO stack of resources. + + Intended for use in :class:`google.cloud.datastore.batch.Batch.__enter__`, + :class:`google.cloud.storage.batch.Batch.__enter__`, etc. + """ + def __init__(self): + super(_LocalStack, self).__init__() + self._stack = [] + + def __iter__(self): + """Iterate the stack in LIFO order. + """ + return iter(reversed(self._stack)) + + def push(self, resource): + """Push a resource onto our stack. + """ + self._stack.append(resource) + + def pop(self): + """Pop a resource from our stack. + + :rtype: object + :returns: the top-most resource, after removing it. + :raises IndexError: if the stack is empty. + """ + return self._stack.pop() + + @property + def top(self): + """Get the top-most resource + + :rtype: object + :returns: the top-most item, or None if the stack is empty. + """ + if len(self._stack) > 0: + return self._stack[-1] + + +class _UTC(datetime.tzinfo): + """Basic UTC implementation. + + Implementing a small surface area to avoid depending on ``pytz``. + """ + + _dst = datetime.timedelta(0) + _tzname = 'UTC' + _utcoffset = _dst + + def dst(self, dt): # pylint: disable=unused-argument + """Daylight savings time offset.""" + return self._dst + + def fromutc(self, dt): + """Convert a timestamp from (naive) UTC to this timezone.""" + if dt.tzinfo is None: + return dt.replace(tzinfo=self) + return super(_UTC, self).fromutc(dt) + + def tzname(self, dt): # pylint: disable=unused-argument + """Get the name of this timezone.""" + return self._tzname + + def utcoffset(self, dt): # pylint: disable=unused-argument + """UTC offset of this timezone.""" + return self._utcoffset + + def __repr__(self): + return '<%s>' % (self._tzname,) + + def __str__(self): + return self._tzname + + +def _ensure_tuple_or_list(arg_name, tuple_or_list): + """Ensures an input is a tuple or list. + + This effectively reduces the iterable types allowed to a very short + whitelist: list and tuple. + + :type arg_name: str + :param arg_name: Name of argument to use in error message. 
+ + :type tuple_or_list: sequence of str + :param tuple_or_list: Sequence to be verified. + + :rtype: list of str + :returns: The ``tuple_or_list`` passed in cast to a ``list``. + :raises TypeError: if the ``tuple_or_list`` is not a tuple or list. + """ + if not isinstance(tuple_or_list, (tuple, list)): + raise TypeError('Expected %s to be a tuple or list. ' + 'Received %r' % (arg_name, tuple_or_list)) + return list(tuple_or_list) + + +def _app_engine_id(): + """Gets the App Engine application ID if it can be inferred. + + :rtype: str or ``NoneType`` + :returns: App Engine application ID if running in App Engine, + else ``None``. + """ + if app_identity is None: + return None + + return app_identity.get_application_id() + + +def _file_project_id(): + """Gets the project ID from the credentials file if one is available. + + :rtype: str or ``NoneType`` + :returns: Project ID from JSON credentials file if value exists, + else ``None``. + """ + credentials_file_path = os.getenv(CREDENTIALS) + if credentials_file_path: + with open(credentials_file_path, 'rb') as credentials_file: + credentials_json = credentials_file.read() + credentials = json.loads(credentials_json.decode('utf-8')) + return credentials.get('project_id') + + +def _get_nix_config_path(): + """Get the ``gcloud`` CLI config path on *nix systems. + + :rtype: str + :returns: The filename on a *nix system containing the CLI + config file. + """ + return os.path.join(_USER_ROOT, '.config', _GCLOUD_CONFIG_FILE) + + +def _get_windows_config_path(): + """Get the ``gcloud`` CLI config path on Windows systems. + + :rtype: str + :returns: The filename on a Windows system containing the CLI + config file. + """ + appdata_dir = os.getenv('APPDATA', '') + return os.path.join(appdata_dir, _GCLOUD_CONFIG_FILE) + + +def _default_service_project_id(): + """Retrieves the project ID from the gcloud command line tool. + + This assumes the ``.config`` directory is stored + - in ~/.config on *nix systems + - in the %APPDATA% directory on Windows systems + + Additionally, the ${HOME} / "~" directory may not be present on Google + App Engine, so this may be conditionally ignored. + + Files that cannot be opened with configparser are silently ignored; this is + designed so that you can specify a list of potential configuration file + locations. + + :rtype: str or ``NoneType`` + :returns: Project-ID from default configuration file else ``None`` + """ + search_paths = [] + if _USER_ROOT is not None: + search_paths.append(_get_nix_config_path()) + + if os.name == 'nt': + search_paths.append(_get_windows_config_path()) + + config = configparser.RawConfigParser() + config.read(search_paths) + + if config.has_section(_GCLOUD_CONFIG_SECTION): + return config.get(_GCLOUD_CONFIG_SECTION, _GCLOUD_CONFIG_KEY) + + +def _compute_engine_id(): + """Gets the Compute Engine project ID if it can be inferred. + + Uses 169.254.169.254 for the metadata server to avoid request + latency from DNS lookup. + + See https://cloud.google.com/compute/docs/metadata#metadataserver + for information about this IP address. (This IP is also used for + Amazon EC2 instances, so the metadata flavor is crucial.) + + See https://github.com/google/oauth2client/issues/93 for context about + DNS latency. + + :rtype: str or ``NoneType`` + :returns: Compute Engine project ID if the metadata service is available, + else ``None``. 
+ """ + host = '169.254.169.254' + uri_path = '/computeMetadata/v1/project/project-id' + headers = {'Metadata-Flavor': 'Google'} + connection = http_client.HTTPConnection(host, timeout=0.1) + + try: + connection.request('GET', uri_path, headers=headers) + response = connection.getresponse() + if response.status == 200: + return response.read() + except socket.error: # socket.timeout or socket.error(64, 'Host is down') + pass + finally: + connection.close() + + +def _get_production_project(): + """Gets the production project if it can be inferred.""" + return os.getenv(PROJECT) + + +def _determine_default_project(project=None): + """Determine default project ID explicitly or implicitly as fall-back. + + In implicit case, supports three environments. In order of precedence, the + implicit environments are: + + * GOOGLE_CLOUD_PROJECT environment variable + * GOOGLE_APPLICATION_CREDENTIALS JSON file + * Get default service project from + ``$ gcloud beta auth application-default login`` + * Google App Engine application ID + * Google Compute Engine project ID (from metadata server) + + :type project: str + :param project: Optional. The project name to use as default. + + :rtype: str or ``NoneType`` + :returns: Default project if it can be determined. + """ + if project is None: + project = _get_production_project() + + if project is None: + project = _file_project_id() + + if project is None: + project = _default_service_project_id() + + if project is None: + project = _app_engine_id() + + if project is None: + project = _compute_engine_id() + + return project + + +def _millis(when): + """Convert a zone-aware datetime to integer milliseconds. + + :type when: :class:`datetime.datetime` + :param when: the datetime to convert + + :rtype: int + :returns: milliseconds since epoch for ``when`` + """ + micros = _microseconds_from_datetime(when) + return micros // 1000 + + +def _datetime_from_microseconds(value): + """Convert timestamp to datetime, assuming UTC. + + :type value: float + :param value: The timestamp to convert + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the value. + """ + return _EPOCH + datetime.timedelta(microseconds=value) + + +def _microseconds_from_datetime(value): + """Convert non-none datetime to microseconds. + + :type value: :class:`datetime.datetime` + :param value: The timestamp to convert. + + :rtype: int + :returns: The timestamp, in microseconds. + """ + if not value.tzinfo: + value = value.replace(tzinfo=UTC) + # Regardless of what timezone is on the value, convert it to UTC. + value = value.astimezone(UTC) + # Convert the datetime to a microsecond timestamp. + return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond + + +def _millis_from_datetime(value): + """Convert non-none datetime to timestamp, assuming UTC. + + :type value: :class:`datetime.datetime`, or None + :param value: the timestamp + + :rtype: int, or ``NoneType`` + :returns: the timestamp, in milliseconds, or None + """ + if value is not None: + return _millis(value) + + +def _date_from_iso8601_date(value): + """Convert a ISO8601 date string to native datetime date + + :type value: str + :param value: The date string to convert + + :rtype: :class:`datetime.date` + :returns: A datetime date object created from the string + + """ + return datetime.datetime.strptime(value, '%Y-%m-%d').date() + + +def _rfc3339_to_datetime(dt_str): + """Convert a microsecond-precision timetamp to a native datetime. + + :type dt_str: str + :param dt_str: The string to convert. 
+ + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the string. + """ + return datetime.datetime.strptime( + dt_str, _RFC3339_MICROS).replace(tzinfo=UTC) + + +def _rfc3339_nanos_to_datetime(dt_str): + """Convert a nanosecond-precision timestamp to a native datetime. + + .. note:: + + Python datetimes do not support nanosecond precision; this function + therefore truncates such values to microseconds. + + :type dt_str: str + :param dt_str: The string to convert. + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the string. + :raises ValueError: If the timestamp does not match the RFC 3339 + regular expression. + """ + with_nanos = _RFC3339_NANOS.match(dt_str) + if with_nanos is None: + raise ValueError( + 'Timestamp: %r, does not match pattern: %r' % ( + dt_str, _RFC3339_NANOS.pattern)) + bare_seconds = datetime.datetime.strptime( + with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION) + fraction = with_nanos.group('nanos') + scale = 9 - len(fraction) + nanos = int(fraction) * (10 ** scale) + micros = nanos // 1000 + return bare_seconds.replace(microsecond=micros, tzinfo=UTC) + + +def _datetime_to_rfc3339(value, ignore_zone=True): + """Convert a timestamp to a string. + + :type value: :class:`datetime.datetime` + :param value: The datetime object to be converted to a string. + + :type ignore_zone: boolean + :param ignore_zone: If True, then the timezone (if any) of the datetime + object is ignored. + + :rtype: str + :returns: The string representing the datetime stamp. + """ + if not ignore_zone and value.tzinfo is not None: + # Convert to UTC and remove the time zone info. + value = value.replace(tzinfo=None) - value.utcoffset() + + return value.strftime(_RFC3339_MICROS) + + +def _to_bytes(value, encoding='ascii'): + """Converts a string value to bytes, if necessary. + + Unfortunately, ``six.b`` is insufficient for this task since in + Python2 it does not modify ``unicode`` objects. + + :type value: str / bytes or unicode + :param value: The string/bytes value to be converted. + + :type encoding: str + :param encoding: The encoding to use to convert unicode to bytes. Defaults + to "ascii", which will not allow any characters from + ordinals larger than 127. Other useful values are + "latin-1", which which will only allows byte ordinals + (up to 255) and "utf-8", which will encode any unicode + that needs to be. + + :rtype: str / bytes + :returns: The original value converted to bytes (if unicode) or as passed + in if it started out as bytes. + :raises TypeError: if the value could not be converted to bytes. + """ + result = (value.encode(encoding) + if isinstance(value, six.text_type) else value) + if isinstance(result, six.binary_type): + return result + else: + raise TypeError('%r could not be converted to bytes' % (value,)) + + +def _bytes_to_unicode(value): + """Converts bytes to a unicode value, if necessary. + + :type value: bytes + :param value: bytes value to attempt string conversion on. + + :rtype: str + :returns: The original value converted to unicode (if bytes) or as passed + in if it started out as unicode. + + :raises ValueError: if the value could not be converted to unicode. + """ + result = (value.decode('utf-8') + if isinstance(value, six.binary_type) else value) + if isinstance(result, six.text_type): + return result + else: + raise ValueError('%r could not be converted to unicode' % (value,)) + + +def _pb_timestamp_to_datetime(timestamp_pb): + """Convert a Timestamp protobuf to a datetime object. 
+ + :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp` + :param timestamp_pb: A Google returned timestamp protobuf. + + :rtype: :class:`datetime.datetime` + :returns: A UTC datetime object converted from a protobuf timestamp. + """ + return ( + _EPOCH + + datetime.timedelta( + seconds=timestamp_pb.seconds, + microseconds=(timestamp_pb.nanos / 1000.0), + ) + ) + + +def _pb_timestamp_to_rfc3339(timestamp_pb): + """Convert a Timestamp protobuf to an RFC 3339 string. + + :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp` + :param timestamp_pb: A Google returned timestamp protobuf. + + :rtype: string + :returns: An RFC 3339 formatted timestamp string. + """ + timestamp = _pb_timestamp_to_datetime(timestamp_pb) + return _datetime_to_rfc3339(timestamp) + + +def _datetime_to_pb_timestamp(when): + """Convert a datetime object to a Timestamp protobuf. + + :type when: :class:`datetime.datetime` + :param when: the datetime to convert + + :rtype: :class:`google.protobuf.timestamp_pb2.Timestamp` + :returns: A timestamp protobuf corresponding to the object. + """ + ms_value = _microseconds_from_datetime(when) + seconds, micros = divmod(ms_value, 10**6) + nanos = micros * 10**3 + return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) + + +def _name_from_project_path(path, project, template): + """Validate a URI path and get the leaf object's name. + + :type path: str + :param path: URI path containing the name. + + :type project: str or NoneType + :param project: The project associated with the request. It is + included for validation purposes. If passed as None, + disables validation. + + :type template: str + :param template: Template regex describing the expected form of the path. + The regex must have two named groups, 'project' and + 'name'. + + :rtype: str + :returns: Name parsed from ``path``. + :raises ValueError: if the ``path`` is ill-formed or if the project from + the ``path`` does not agree with the ``project`` + passed in. + """ + if isinstance(template, str): + template = re.compile(template) + + match = template.match(path) + + if not match: + raise ValueError('path "%s" did not match expected pattern "%s"' % ( + path, template.pattern,)) + + if project is not None: + found_project = match.group('project') + if found_project != project: + raise ValueError( + 'Project from client (%s) should agree with ' + 'project from resource(%s).' % (project, found_project)) + + return match.group('name') + + +class MetadataPlugin(object): + """Callable class to transform metadata for gRPC requests. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: The OAuth2 Credentials to use for creating + access tokens. + """ + + def __init__(self, credentials): + self._credentials = credentials + + def __call__(self, unused_context, callback): + """Adds authorization header to request metadata. + + :type unused_context: object + :param unused_context: A gRPC context which is not needed + to modify headers. + + :type callback: callable + :param callback: A callback which will use the headers. + """ + access_token = self._credentials.get_access_token().access_token + headers = [ + ('authorization', 'Bearer ' + access_token), + ] + callback(headers, None) + + +def make_secure_stub(credentials, user_agent, stub_class, host): + """Makes a secure stub for an RPC service. + + Uses / depends on gRPC. 
+ + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: The OAuth2 Credentials to use for creating + access tokens. + + :type user_agent: str + :param user_agent: (Optional) The user agent to be used with API requests. + + :type stub_class: type + :param stub_class: A gRPC stub type for a given service. + + :type host: str + :param host: The host for the service. + + :rtype: object, instance of ``stub_class`` + :returns: The stub object used to make gRPC requests to a given API. + """ + # ssl_channel_credentials() loads root certificates from + # `grpc/_adapter/credentials/roots.pem`. + transport_creds = grpc.ssl_channel_credentials() + custom_metadata_plugin = MetadataPlugin(credentials) + auth_creds = grpc.metadata_call_credentials( + custom_metadata_plugin, name='google_creds') + channel_creds = grpc.composite_channel_credentials( + transport_creds, auth_creds) + target = '%s:%d' % (host, http_client.HTTPS_PORT) + channel_args = ( + ('grpc.primary_user_agent', user_agent), + ) + channel = grpc.secure_channel(target, channel_creds, + options=channel_args) + return stub_class(channel) + + +def make_insecure_stub(stub_class, host, port=None): + """Makes an insecure stub for an RPC service. + + Uses / depends on gRPC. + + :type stub_class: type + :param stub_class: A gRPC stub type for a given service. + + :type host: str + :param host: The host for the service. May also include the port + if ``port`` is unspecified. + + :type port: int + :param port: (Optional) The port for the service. + + :rtype: object, instance of ``stub_class`` + :returns: The stub object used to make gRPC requests to a given API. + """ + if port is None: + target = host + else: + # NOTE: This assumes port != http_client.HTTPS_PORT: + target = '%s:%d' % (host, port) + channel = grpc.insecure_channel(target) + return stub_class(channel) + + +try: + from pytz import UTC # pylint: disable=unused-import,wrong-import-order +except ImportError: + UTC = _UTC() # Singleton instance to be used throughout. + +# Need to define _EPOCH at the end of module since it relies on UTC. +_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=UTC) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py new file mode 100644 index 000000000000..6aece7ec652b --- /dev/null +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -0,0 +1,85 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared testing utilities.""" + + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ + def __init__(self, module, **kw): + self.module = module + if len(kw) == 0: # pragma: NO COVER + raise ValueError('_Monkey was used with nothing to monkey-patch') + self.to_restore = {key: getattr(module, key) for key in kw} + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) + + +class _NamedTemporaryFile(object): + + def __init__(self, suffix=''): + import os + import tempfile + filehandle, self.name = tempfile.mkstemp(suffix=suffix) + os.close(filehandle) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + import os + os.remove(self.name) + + +class _GAXBaseAPI(object): + + _random_gax_error = False + + def __init__(self, **kw): + self.__dict__.update(kw) + + def _make_grpc_error(self, status_code): + from grpc._channel import _RPCState + from google.cloud.exceptions import GrpcRendezvous + + details = 'Some error details.' + exc_state = _RPCState((), None, None, status_code, details) + return GrpcRendezvous(exc_state, None, None, None) + + def _make_grpc_not_found(self): + from grpc import StatusCode + return self._make_grpc_error(StatusCode.NOT_FOUND) + + def _make_grpc_failed_precondition(self): + from grpc import StatusCode + return self._make_grpc_error(StatusCode.FAILED_PRECONDITION) + + +class _GAXPageIterator(object): + + def __init__(self, items, page_token): + self._items = items + self.page_token = page_token + + def next(self): + items, self._items = self._items, None + return items diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py new file mode 100644 index 000000000000..ea079e14fd6e --- /dev/null +++ b/packages/google-cloud-core/google/cloud/client.py @@ -0,0 +1,186 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Base classes for client used to interact with Google Cloud APIs.""" + +from oauth2client.service_account import ServiceAccountCredentials +import six + +from google.cloud._helpers import _determine_default_project +from google.cloud.connection import Connection +from google.cloud.credentials import get_credentials + + +class _ClientFactoryMixin(object): + """Mixin to allow factories that create credentials. + + .. note:: + + This class is virtual. + """ + + @classmethod + def from_service_account_json(cls, json_credentials_path, *args, **kwargs): + """Factory to retrieve JSON credentials while creating client. + + :type json_credentials_path: string + :param json_credentials_path: The path to a private key file (this file + was given to you when you created the + service account). This file must contain + a JSON object with a private key and + other credentials information (downloaded + from the Google APIs console). + + :type args: tuple + :param args: Remaining positional arguments to pass to constructor. 
+ + :type kwargs: dict + :param kwargs: Remaining keyword arguments to pass to constructor. + + :rtype: :class:`google.cloud.pubsub.client.Client` + :returns: The client created with the retrieved JSON credentials. + :raises: :class:`TypeError` if there is a conflict with the kwargs + and the credentials created by the factory. + """ + if 'credentials' in kwargs: + raise TypeError('credentials must not be in keyword arguments') + credentials = ServiceAccountCredentials.from_json_keyfile_name( + json_credentials_path) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_p12(cls, client_email, private_key_path, + *args, **kwargs): + """Factory to retrieve P12 credentials while creating client. + + .. note:: + Unless you have an explicit reason to use a PKCS12 key for your + service account, we recommend using a JSON key. + + :type client_email: string + :param client_email: The e-mail attached to the service account. + + :type private_key_path: string + :param private_key_path: The path to a private key file (this file was + given to you when you created the service + account). This file must be in P12 format. + + :type args: tuple + :param args: Remaining positional arguments to pass to constructor. + + :type kwargs: dict + :param kwargs: Remaining keyword arguments to pass to constructor. + + :rtype: :class:`google.cloud.client.Client` + :returns: The client created with the retrieved P12 credentials. + :raises: :class:`TypeError` if there is a conflict with the kwargs + and the credentials created by the factory. + """ + if 'credentials' in kwargs: + raise TypeError('credentials must not be in keyword arguments') + credentials = ServiceAccountCredentials.from_p12_keyfile( + client_email, private_key_path) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + +class Client(_ClientFactoryMixin): + """Client to bundle configuration needed for API requests. + + Assumes that the associated ``_connection_class`` only accepts + ``http`` and ``credentials`` in its constructor. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + + def __init__(self, credentials=None, http=None): + if credentials is None and http is None: + credentials = get_credentials() + self.connection = self._connection_class( + credentials=credentials, http=http) + + +class _ClientProjectMixin(object): + """Mixin to allow setting the project on the client. + + :type project: string + :param project: the project which the client acts on behalf of. If not + passed falls back to the default inferred from the + environment. + + :raises: :class:`EnvironmentError` if the project is neither passed in nor + set in the environment. :class:`ValueError` if the project value + is invalid. 
+ """ + + def __init__(self, project=None): + project = self._determine_default(project) + if project is None: + raise EnvironmentError('Project was not passed and could not be ' + 'determined from the environment.') + if isinstance(project, six.binary_type): + project = project.decode('utf-8') + if not isinstance(project, six.string_types): + raise ValueError('Project must be a string.') + self.project = project + + @staticmethod + def _determine_default(project): + """Helper: use default project detection.""" + return _determine_default_project(project) + + +class JSONClient(Client, _ClientProjectMixin): + """Client to for Google JSON-based API. + + Assumes such APIs use the ``project`` and the client needs to store this + value. + + :type project: string + :param project: the project which the client acts on behalf of. If not + passed falls back to the default inferred from the + environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + + :raises: :class:`ValueError` if the project is neither passed in nor + set in the environment. + """ + + def __init__(self, project=None, credentials=None, http=None): + _ClientProjectMixin.__init__(self, project=project) + Client.__init__(self, credentials=credentials, http=http) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py new file mode 100644 index 000000000000..c5ed3a627e99 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -0,0 +1,361 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared implementation of connections to API servers.""" + +import json +from pkg_resources import get_distribution +import six +from six.moves.urllib.parse import urlencode + +import httplib2 + +from google.cloud.exceptions import make_exception + + +API_BASE_URL = 'https://www.googleapis.com' +"""The base of the API call URL.""" + +DEFAULT_USER_AGENT = 'gcloud-python/{0}'.format( + get_distribution('google-cloud').version) +"""The user agent for google-cloud-python requests.""" + + +class Connection(object): + """A generic connection to Google Cloud Platform. + + Subclasses should understand only the basic types in method arguments, + however they should be capable of returning advanced types. + + If no value is passed in for ``http``, a :class:`httplib2.Http` object + will be created and authorized with the ``credentials``. If not, the + ``credentials`` and ``http`` need not be related. 
+ + Subclasses may seek to use the private key from ``credentials`` to sign + data. + + A custom (non-``httplib2``) HTTP object must have a ``request`` method + which accepts the following arguments: + + * ``uri`` + * ``method`` + * ``body`` + * ``headers`` + + In addition, ``redirections`` and ``connection_type`` may be used. + + Without the use of ``credentials.authorize(http)``, a custom ``http`` + object will also need to be able to add a bearer token to API + requests and handle token refresh on 401 errors. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for this connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. + """ + + USER_AGENT = DEFAULT_USER_AGENT + + SCOPE = None + """The scopes required for authenticating with a service. + + Needs to be set by subclasses. + """ + + def __init__(self, credentials=None, http=None): + self._http = http + self._credentials = self._create_scoped_credentials( + credentials, self.SCOPE) + + @property + def credentials(self): + """Getter for current credentials. + + :rtype: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :returns: The credentials object associated with this connection. + """ + return self._credentials + + @property + def http(self): + """A getter for the HTTP transport used in talking to the API. + + :rtype: :class:`httplib2.Http` + :returns: A Http object used to transport data. + """ + if self._http is None: + self._http = httplib2.Http() + if self._credentials: + self._http = self._credentials.authorize(self._http) + return self._http + + @staticmethod + def _create_scoped_credentials(credentials, scope): + """Create a scoped set of credentials if it is required. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to add a scope to. + + :type scope: list of URLs + :param scope: the effective service auth scopes for the connection. + + :rtype: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :returns: A new credentials object that has a scope added (if needed). + """ + if credentials: + try: + if credentials.create_scoped_required(): + credentials = credentials.create_scoped(scope) + except AttributeError: + pass + return credentials + + +class JSONConnection(Connection): + """A connection to a Google JSON-based API. + + These APIs are discovery based. For reference: + + https://developers.google.com/discovery/ + + This defines :meth:`api_request` for making a generic JSON + API request and API requests are created elsewhere. + + The class constants + + * :attr:`API_BASE_URL` + * :attr:`API_VERSION` + * :attr:`API_URL_TEMPLATE` + + must be updated by subclasses. + """ + + API_BASE_URL = None + """The base of the API call URL.""" + + API_VERSION = None + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = None + """A template for the URL of a particular API call.""" + + @classmethod + def build_api_url(cls, path, query_params=None, + api_base_url=None, api_version=None): + """Construct an API url given a few components, some optional. + + Typically, you shouldn't need to use this method. + + :type path: string + :param path: The path to the resource (ie, ``'/b/bucket-name'``). 
+ + :type query_params: dict or list + :param query_params: A dictionary of keys and values (or list of + key-value pairs) to insert into the query + string of the URL. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + + :type api_version: string + :param api_version: The version of the API to call. + Typically you shouldn't provide this and instead + use the default for the library. + + :rtype: string + :returns: The URL assembled from the pieces provided. + """ + url = cls.API_URL_TEMPLATE.format( + api_base_url=(api_base_url or cls.API_BASE_URL), + api_version=(api_version or cls.API_VERSION), + path=path) + + query_params = query_params or {} + if query_params: + url += '?' + urlencode(query_params) + + return url + + def _make_request(self, method, url, data=None, content_type=None, + headers=None, target_object=None): + """A low level method to send a request to the API. + + Typically, you shouldn't need to use this method. + + :type method: string + :param method: The HTTP method to use in the request. + + :type url: string + :param url: The URL to send the request to. + + :type data: string + :param data: The data to send as the body of the request. + + :type content_type: string + :param content_type: The proper MIME type of the data provided. + + :type headers: dict + :param headers: A dictionary of HTTP headers to send with the request. + + :type target_object: object or :class:`NoneType` + :param target_object: Argument to be used by library callers. + This can allow custom behavior, for example, to + defer an HTTP request and complete initialization + of the object at a later time. + + :rtype: tuple of ``response`` (a dictionary of sorts) + and ``content`` (a string). + :returns: The HTTP response object and the content of the response, + returned by :meth:`_do_request`. + """ + headers = headers or {} + headers['Accept-Encoding'] = 'gzip' + + if data: + content_length = len(str(data)) + else: + content_length = 0 + + # NOTE: str is intended, bytes are sufficient for headers. + headers['Content-Length'] = str(content_length) + + if content_type: + headers['Content-Type'] = content_type + + headers['User-Agent'] = self.USER_AGENT + + return self._do_request(method, url, headers, data, target_object) + + def _do_request(self, method, url, headers, data, + target_object): # pylint: disable=unused-argument + """Low-level helper: perform the actual API request over HTTP. + + Allows batch context managers to override and defer a request. + + :type method: string + :param method: The HTTP method to use in the request. + + :type url: string + :param url: The URL to send the request to. + + :type headers: dict + :param headers: A dictionary of HTTP headers to send with the request. + + :type data: string + :param data: The data to send as the body of the request. + + :type target_object: object or :class:`NoneType` + :param target_object: Unused ``target_object`` here but may be used + by a superclass. + + :rtype: tuple of ``response`` (a dictionary of sorts) + and ``content`` (a string). + :returns: The HTTP response object and the content of the response. + """ + return self.http.request(uri=url, method=method, headers=headers, + body=data) + + def api_request(self, method, path, query_params=None, + data=None, content_type=None, + api_base_url=None, api_version=None, + expect_json=True, _target_object=None): + """Make a request over the HTTP transport to the API. 
+ + You shouldn't need to use this method, but if you plan to + interact with the API using these primitives, this is the + correct one to use. + + :type method: string + :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). + Required. + + :type path: string + :param path: The path to the resource (ie, ``'/b/bucket-name'``). + Required. + + :type query_params: dict or list + :param query_params: A dictionary of keys and values (or list of + key-value pairs) to insert into the query + string of the URL. + + :type data: string + :param data: The data to send as the body of the request. Default is + the empty string. + + :type content_type: string + :param content_type: The proper MIME type of the data provided. Default + is None. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + Default is the standard API base URL. + + :type api_version: string + :param api_version: The version of the API to call. Typically + you shouldn't provide this and instead use + the default for the library. Default is the + latest API version supported by + google-cloud-python. + + :type expect_json: bool + :param expect_json: If True, this method will try to parse the + response as JSON and raise an exception if + that cannot be done. Default is True. + + :type _target_object: :class:`object` or :class:`NoneType` + :param _target_object: Protected argument to be used by library + callers. This can allow custom behavior, for + example, to defer an HTTP request and complete + initialization of the object at a later time. + + :raises: Exception if the response code is not 200 OK. + :rtype: dict or str + :returns: The API response payload, either as a raw string or + a dictionary if the response is valid JSON. + """ + url = self.build_api_url(path=path, query_params=query_params, + api_base_url=api_base_url, + api_version=api_version) + + # Making the executive decision that any dictionary + # data will be sent properly as JSON. + if data and isinstance(data, dict): + data = json.dumps(data) + content_type = 'application/json' + + response, content = self._make_request( + method=method, url=url, data=data, content_type=content_type, + target_object=_target_object) + + if not 200 <= response.status < 300: + raise make_exception(response, content, + error_info=method + ' ' + url) + + string_or_bytes = (six.binary_type, six.text_type) + if content and expect_json and isinstance(content, string_or_bytes): + content_type = response.get('content-type', '') + if not content_type.startswith('application/json'): + raise TypeError('Expected JSON, got %s' % content_type) + if isinstance(content, six.binary_type): + content = content.decode('utf-8') + return json.loads(content) + + return content diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py new file mode 100644 index 000000000000..61f918856b9c --- /dev/null +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -0,0 +1,251 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""A simple wrapper around the OAuth2 credentials library.""" + +import base64 +import datetime +import six +from six.moves.urllib.parse import urlencode + +from oauth2client import client + +from google.cloud._helpers import UTC +from google.cloud._helpers import _NOW +from google.cloud._helpers import _microseconds_from_datetime + + +def get_credentials(): + """Gets credentials implicitly from the current environment. + + .. note:: + + You should not need to use this function directly. Instead, use a + helper method which uses this method under the hood. + + Checks environment in order of precedence: + + * Google App Engine (production and testing) + * Environment variable :envvar:`GOOGLE_APPLICATION_CREDENTIALS` pointing to + a file with stored credentials information. + * Stored "well known" file associated with ``gcloud`` command line tool. + * Google Compute Engine production environment. + + The file referred to in :envvar:`GOOGLE_APPLICATION_CREDENTIALS` is + expected to contain information about credentials that are ready to use. + This means either service account information or user account information + with a ready-to-use refresh token: + + .. code:: json + + { + 'type': 'authorized_user', + 'client_id': '...', + 'client_secret': '...', + 'refresh_token': '...' + } + + or + + .. code:: json + + { + 'type': 'service_account', + 'project_id': '...', + 'private_key_id': '...', + 'private_key': '...', + 'client_email': '...', + 'client_id': '...', + 'auth_uri': '...', + 'token_uri': '...', + 'auth_provider_x509_cert_url': '...', + 'client_x509_cert_url': '...' + } + + The second of these is simply a JSON key downloaded from the Google APIs + console. The first is a close cousin of the "client secrets" JSON file + used by :mod:`oauth2client.clientsecrets` but differs in formatting. + + :rtype: :class:`oauth2client.client.GoogleCredentials`, + :class:`oauth2client.contrib.appengine.AppAssertionCredentials`, + :class:`oauth2client.contrib.gce.AppAssertionCredentials`, + :class:`oauth2client.service_account.ServiceAccountCredentials` + :returns: A new credentials instance corresponding to the implicit + environment. + """ + return client.GoogleCredentials.get_application_default() + + +def _get_signed_query_params(credentials, expiration, string_to_sign): + """Gets query parameters for creating a signed URL. + + :type credentials: :class:`oauth2client.client.AssertionCredentials` + :param credentials: The credentials used to create a private key + for signing text. + + :type expiration: int or long + :param expiration: When the signed URL should expire. + + :type string_to_sign: string + :param string_to_sign: The string to be signed by the credentials. + + :raises AttributeError: If :meth: sign_blob is unavailable. + + :rtype: dict + :returns: Query parameters matching the signing credentials with a + signed payload. + """ + if not hasattr(credentials, 'sign_blob'): + auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' + 'google-cloud-auth.html#setting-up-a-service-account') + raise AttributeError('you need a private key to sign credentials.' + 'the credentials you are currently using %s ' + 'just contains a token. see %s for more ' + 'details.' 
% (type(credentials), auth_uri)) + + _, signature_bytes = credentials.sign_blob(string_to_sign) + signature = base64.b64encode(signature_bytes) + service_account_name = credentials.service_account_email + return { + 'GoogleAccessId': service_account_name, + 'Expires': str(expiration), + 'Signature': signature, + } + + +def _get_expiration_seconds(expiration): + """Convert 'expiration' to a number of seconds in the future. + + :type expiration: int, long, datetime.datetime, datetime.timedelta + :param expiration: When the signed URL should expire. + + :raises TypeError: When expiration is not an integer. + + :rtype: int + :returns: a timestamp as an absolute number of seconds. + """ + # If it's a timedelta, add it to `now` in UTC. + if isinstance(expiration, datetime.timedelta): + now = _NOW().replace(tzinfo=UTC) + expiration = now + expiration + + # If it's a datetime, convert to a timestamp. + if isinstance(expiration, datetime.datetime): + micros = _microseconds_from_datetime(expiration) + expiration = micros // 10**6 + + if not isinstance(expiration, six.integer_types): + raise TypeError('Expected an integer timestamp, datetime, or ' + 'timedelta. Got %s' % type(expiration)) + return expiration + + +def generate_signed_url(credentials, resource, expiration, + api_access_endpoint='', + method='GET', content_md5=None, + content_type=None, response_type=None, + response_disposition=None, generation=None): + """Generate signed URL to provide query-string auth'n to a resource. + + .. note:: + + Assumes ``credentials`` implements a ``sign_blob()`` method that takes + bytes to sign and returns a pair of the key ID (unused here) and the + signed bytes (this is abstract in the base class + :class:`oauth2client.client.AssertionCredentials`). Also assumes + ``credentials`` has a ``service_account_email`` property which + identifies the credentials. + + .. note:: + + If you are on Google Compute Engine, you can't generate a signed URL. + Follow `Issue 922`_ for updates on this. If you'd like to be able to + generate a signed URL from GCE, you can use a standard service account + from a JSON file rather than a GCE service account. + + See headers `reference`_ for more details on optional arguments. + + .. _Issue 922: https://github.com/GoogleCloudPlatform/\ + google-cloud-python/issues/922 + .. _reference: https://cloud.google.com/storage/docs/reference-headers + + :type credentials: :class:`oauth2client.appengine.AppAssertionCredentials` + :param credentials: Credentials object with an associated private key to + sign text. + + :type resource: string + :param resource: A pointer to a specific resource + (typically, ``/bucket-name/path/to/blob.txt``). + + :type expiration: :class:`int`, :class:`long`, :class:`datetime.datetime`, + :class:`datetime.timedelta` + :param expiration: When the signed URL should expire. + + :type api_access_endpoint: str + :param api_access_endpoint: Optional URI base. Defaults to empty string. + + :type method: str + :param method: The HTTP verb that will be used when requesting the URL. + Defaults to ``'GET'``. + + :type content_md5: str + :param content_md5: (Optional) The MD5 hash of the object referenced by + ``resource``. + + :type content_type: str + :param content_type: (Optional) The content type of the object referenced + by ``resource``. + + :type response_type: str + :param response_type: (Optional) Content type of responses to requests for + the signed URL. Used to over-ride the content type of + the underlying resource. 
+ + :type response_disposition: str + :param response_disposition: (Optional) Content disposition of responses to + requests for the signed URL. + + :type generation: str + :param generation: (Optional) A value that indicates which generation of + the resource to fetch. + + :rtype: string + :returns: A signed URL you can use to access the resource + until expiration. + """ + expiration = _get_expiration_seconds(expiration) + + # Generate the string to sign. + string_to_sign = '\n'.join([ + method, + content_md5 or '', + content_type or '', + str(expiration), + resource]) + + # Set the right query parameters. + query_params = _get_signed_query_params(credentials, + expiration, + string_to_sign) + if response_type is not None: + query_params['response-content-type'] = response_type + if response_disposition is not None: + query_params['response-content-disposition'] = response_disposition + if generation is not None: + query_params['generation'] = generation + + # Return the built URL. + return '{endpoint}{resource}?{querystring}'.format( + endpoint=api_access_endpoint, resource=resource, + querystring=urlencode(query_params)) diff --git a/packages/google-cloud-core/google/cloud/environment_vars.py b/packages/google-cloud-core/google/cloud/environment_vars.py new file mode 100644 index 000000000000..d27eca8742d0 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/environment_vars.py @@ -0,0 +1,44 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Comprehensive list of environment variables used in google-cloud. + +These enable many types of implicit behavior in both production +and tests. +""" + +PROJECT = 'GOOGLE_CLOUD_PROJECT' +"""Environment variable defining default project.""" + +GCD_DATASET = 'DATASTORE_DATASET' +"""Environment variable defining default dataset ID under GCD.""" + +GCD_HOST = 'DATASTORE_EMULATOR_HOST' +"""Environment variable defining host for GCD dataset server.""" + +PUBSUB_EMULATOR = 'PUBSUB_EMULATOR_HOST' +"""Environment variable defining host for Pub/Sub emulator.""" + +BIGTABLE_EMULATOR = 'BIGTABLE_EMULATOR_HOST' +"""Environment variable defining host for Bigtable emulator.""" + +CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS' +"""Environment variable defining location of Google credentials.""" + +DISABLE_GRPC = 'GOOGLE_CLOUD_DISABLE_GRPC' +"""Environment variable acting as flag to disable gRPC. + +To be used for APIs where both an HTTP and gRPC implementation +exist. +""" diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py new file mode 100644 index 000000000000..6a4b8706917a --- /dev/null +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -0,0 +1,244 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Custom exceptions for :mod:`google.cloud` package. + +See: https://cloud.google.com/storage/docs/json_api/v1/status-codes +""" + +import copy +import json +import six + +from google.cloud._helpers import _to_bytes + +_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module + +try: + from grpc._channel import _Rendezvous +except ImportError: # pragma: NO COVER + _Rendezvous = None + + +# pylint: disable=invalid-name +GrpcRendezvous = _Rendezvous +"""Exception class raised by gRPC stable.""" +# pylint: enable=invalid-name + + +class GoogleCloudError(Exception): + """Base error class for Google Cloud errors (abstract). + + Each subclass represents a single type of HTTP error response. + """ + code = None + """HTTP status code. Concrete subclasses *must* define. + + See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html + """ + + def __init__(self, message, errors=()): + super(GoogleCloudError, self).__init__(message) + self.message = message + self._errors = errors + + def __str__(self): + result = u'%d %s' % (self.code, self.message) + if six.PY2: + result = _to_bytes(result, 'utf-8') + return result + + @property + def errors(self): + """Detailed error information. + + :rtype: list(dict) + :returns: a list of mappings describing each error. + """ + return [copy.deepcopy(error) for error in self._errors] + + +class Redirection(GoogleCloudError): + """Base for 3xx responses + + This class is abstract. 
+ """ + + +class MovedPermanently(Redirection): + """Exception mapping a '301 Moved Permanently' response.""" + code = 301 + + +class NotModified(Redirection): + """Exception mapping a '304 Not Modified' response.""" + code = 304 + + +class TemporaryRedirect(Redirection): + """Exception mapping a '307 Temporary Redirect' response.""" + code = 307 + + +class ResumeIncomplete(Redirection): + """Exception mapping a '308 Resume Incomplete' response.""" + code = 308 + + +class ClientError(GoogleCloudError): + """Base for 4xx responses + + This class is abstract + """ + + +class BadRequest(ClientError): + """Exception mapping a '400 Bad Request' response.""" + code = 400 + + +class Unauthorized(ClientError): + """Exception mapping a '401 Unauthorized' response.""" + code = 401 + + +class Forbidden(ClientError): + """Exception mapping a '403 Forbidden' response.""" + code = 403 + + +class NotFound(ClientError): + """Exception mapping a '404 Not Found' response.""" + code = 404 + + +class MethodNotAllowed(ClientError): + """Exception mapping a '405 Method Not Allowed' response.""" + code = 405 + + +class Conflict(ClientError): + """Exception mapping a '409 Conflict' response.""" + code = 409 + + +class LengthRequired(ClientError): + """Exception mapping a '411 Length Required' response.""" + code = 411 + + +class PreconditionFailed(ClientError): + """Exception mapping a '412 Precondition Failed' response.""" + code = 412 + + +class RequestRangeNotSatisfiable(ClientError): + """Exception mapping a '416 Request Range Not Satisfiable' response.""" + code = 416 + + +class TooManyRequests(ClientError): + """Exception mapping a '429 Too Many Requests' response.""" + code = 429 + + +class ServerError(GoogleCloudError): + """Base for 5xx responses: (abstract)""" + + +class InternalServerError(ServerError): + """Exception mapping a '500 Internal Server Error' response.""" + code = 500 + + +class MethodNotImplemented(ServerError): + """Exception mapping a '501 Not Implemented' response.""" + code = 501 + + +class BadGateway(ServerError): + """Exception mapping a '502 Bad Gateway' response.""" + code = 502 + + +class ServiceUnavailable(ServerError): + """Exception mapping a '503 Service Unavailable' response.""" + code = 503 + + +def make_exception(response, content, error_info=None, use_json=True): + """Factory: create exception based on HTTP response code. + + :type response: :class:`httplib2.Response` or other HTTP response object + :param response: A response object that defines a status code as the + status attribute. + + :type content: string or dictionary + :param content: The body of the HTTP error response. + + :type error_info: string + :param error_info: Optional string giving extra information about the + failed request. + + :type use_json: bool + :param use_json: Flag indicating if ``content`` is expected to be JSON. + + :rtype: instance of :class:`GoogleCloudError`, or a concrete subclass. + :returns: Exception specific to the error response. + """ + if isinstance(content, six.binary_type): + content = content.decode('utf-8') + + if isinstance(content, six.string_types): + payload = None + if use_json: + try: + payload = json.loads(content) + except ValueError: + # Expected JSON but received something else. 
+ pass + if payload is None: + payload = {'error': {'message': content}} + else: + payload = content + + message = payload.get('error', {}).get('message', '') + errors = payload.get('error', {}).get('errors', ()) + + if error_info is not None: + message += ' (%s)' % (error_info,) + + try: + klass = _HTTP_CODE_TO_EXCEPTION[response.status] + except KeyError: + error = GoogleCloudError(message, errors) + error.code = response.status + else: + error = klass(message, errors) + return error + + +def _walk_subclasses(klass): + """Recursively walk subclass tree.""" + for sub in klass.__subclasses__(): + yield sub + for subsub in _walk_subclasses(sub): + yield subsub + + +# Build the code->exception class mapping. +for _eklass in _walk_subclasses(GoogleCloudError): + code = getattr(_eklass, 'code', None) + if code is not None: + _HTTP_CODE_TO_EXCEPTION[code] = _eklass diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py new file mode 100644 index 000000000000..eecaa5759e80 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -0,0 +1,184 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Iterators for paging through API responses. + +These iterators simplify the process of paging through API responses +where the response is a list of results with a ``nextPageToken``. + +To make an iterator work, just override the ``get_items_from_response`` +method so that given a response (containing a page of results) it parses +those results into an iterable of the actual objects you want:: + + class MyIterator(Iterator): + def get_items_from_response(self, response): + items = response.get('items', []) + for item in items: + my_item = MyItemClass(other_arg=True) + my_item._set_properties(item) + yield my_item + +You then can use this to get **all** the results from a resource:: + + >>> iterator = MyIterator(...) + >>> list(iterator) # Convert to a list (consumes all values). + +Or you can walk your way through items and call off the search early if +you find what you're looking for (resulting in possibly fewer +requests):: + + >>> for item in MyIterator(...): + >>> print item.name + >>> if not item.is_valid: + >>> break +""" + + +class Iterator(object): + """A generic class for iterating through Cloud JSON APIs list responses. + + :type client: :class:`google.cloud.client.Client` + :param client: The client, which owns a connection to make requests. + + :type path: string + :param path: The path to query for the list of items. + + :type extra_params: dict or None + :param extra_params: Extra query string parameters for the API call. 
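+
+ Example (an illustrative sketch; ``MyIterator`` is the subclass from
+ the module docstring above, and the ``/items`` path and query
+ parameter are hypothetical)::
+
+ >>> iterator = MyIterator(client, '/items', extra_params={'maxResults': 50})
+ >>> items = list(iterator)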
+ """ + + PAGE_TOKEN = 'pageToken' + RESERVED_PARAMS = frozenset([PAGE_TOKEN]) + + def __init__(self, client, path, extra_params=None): + self.client = client + self.path = path + self.page_number = 0 + self.next_page_token = None + self.extra_params = extra_params or {} + reserved_in_use = self.RESERVED_PARAMS.intersection( + self.extra_params) + if reserved_in_use: + raise ValueError(('Using a reserved parameter', + reserved_in_use)) + + def __iter__(self): + """Iterate through the list of items.""" + while self.has_next_page(): + response = self.get_next_page_response() + for item in self.get_items_from_response(response): + yield item + + def has_next_page(self): + """Determines whether or not this iterator has more pages. + + :rtype: boolean + :returns: Whether the iterator has more pages or not. + """ + if self.page_number == 0: + return True + + return self.next_page_token is not None + + def get_query_params(self): + """Getter for query parameters for the next request. + + :rtype: dict + :returns: A dictionary of query parameters. + """ + result = ({self.PAGE_TOKEN: self.next_page_token} + if self.next_page_token else {}) + result.update(self.extra_params) + return result + + def get_next_page_response(self): + """Requests the next page from the path provided. + + :rtype: dict + :returns: The parsed JSON response of the next page's contents. + """ + if not self.has_next_page(): + raise RuntimeError('No more pages. Try resetting the iterator.') + + response = self.client.connection.api_request( + method='GET', path=self.path, query_params=self.get_query_params()) + + self.page_number += 1 + self.next_page_token = response.get('nextPageToken') + + return response + + def reset(self): + """Resets the iterator to the beginning.""" + self.page_number = 0 + self.next_page_token = None + + def get_items_from_response(self, response): + """Factory method called while iterating. This should be overridden. + + This method should be overridden by a subclass. It should + accept the API response of a request for the next page of items, + and return a list (or other iterable) of items. + + Typically this method will construct a Bucket or a Blob from the + page of results in the response. + + :type response: dict + :param response: The response of asking for the next page of items. + """ + raise NotImplementedError + + +class MethodIterator(object): + """Method-based iterator iterating through Cloud JSON APIs list responses. + + :type method: instance method + :param method: ``list_foo`` method of a domain object, taking as arguments + ``page_token``, ``page_size``, and optional additional + keyword arguments. + + :type page_token: string or ``NoneType`` + :param page_token: Initial page token to pass. if ``None``, fetch the + first page from the ``method`` API call. + + :type page_size: integer or ``NoneType`` + :param page_size: Maximum number of items to return from the ``method`` + API call; if ``None``, uses the default for the API. + + :type max_calls: integer or ``NoneType`` + :param max_calls: Maximum number of times to make the ``method`` + API call; if ``None``, applies no limit. + + :type kw: dict + :param kw: optional keyword arguments to be passed to ``method``. 
+ """ + def __init__(self, method, page_token=None, page_size=None, + max_calls=None, **kw): + self._method = method + self._token = page_token + self._page_size = page_size + self._kw = kw + self._max_calls = max_calls + self._page_num = 0 + + def __iter__(self): + while self._max_calls is None or self._page_num < self._max_calls: + items, new_token = self._method( + page_token=self._token, page_size=self._page_size, **self._kw) + for item in items: + yield item + if new_token is None: + return + self._page_num += 1 + self._token = new_token diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py new file mode 100644 index 000000000000..156212794487 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -0,0 +1,137 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrap long-running operations returned from Google Cloud APIs.""" + +from google.longrunning import operations_pb2 + + +_GOOGLE_APIS_PREFIX = 'types.googleapis.com' + +_TYPE_URL_MAP = { +} + + +def _compute_type_url(klass, prefix=_GOOGLE_APIS_PREFIX): + """Compute a type URL for a klass. + + :type klass: type + :param klass: class to be used as a factory for the given type + + :type prefix: str + :param prefix: URL prefix for the type + + :rtype: str + :returns: the URL, prefixed as appropriate + """ + name = klass.DESCRIPTOR.full_name + return '%s/%s' % (prefix, name) + + +def _register_type_url(type_url, klass): + """Register a klass as the factory for a given type URL. + + :type type_url: str + :param type_url: URL naming the type + + :type klass: type + :param klass: class to be used as a factory for the given type + + :raises: ValueError if a registration already exists for the URL. + """ + if type_url in _TYPE_URL_MAP: + if _TYPE_URL_MAP[type_url] is not klass: + raise ValueError("Conflict: %s" % (_TYPE_URL_MAP[type_url],)) + + _TYPE_URL_MAP[type_url] = klass + + +class Operation(object): + """Representation of a Google API Long-Running Operation. + + :type name: str + :param name: The fully-qualified path naming the operation. + + :type client: object: must provide ``_operations_stub`` accessor. + :param client: The client used to poll for the status of the operation. + + :type pb_metadata: object + :param pb_metadata: Instance of protobuf metadata class + + :type kw: dict + :param kw: caller-assigned metadata about the operation + """ + + target = None + """Instance assocated with the operations: callers may set.""" + + def __init__(self, name, client, pb_metadata=None, **kw): + self.name = name + self.client = client + self.pb_metadata = pb_metadata + self.metadata = kw.copy() + self._complete = False + + @classmethod + def from_pb(cls, op_pb, client, **kw): + """Factory: construct an instance from a protobuf. + + :type op_pb: :class:`google.longrunning.operations_pb2.Operation` + :param op_pb: Protobuf to be parsed. + + :type client: object: must provide ``_operations_stub`` accessor. 
+ :param client: The client used to poll for the status of the operation. + + :type kw: dict + :param kw: caller-assigned metadata about the operation + + :rtype: :class:`Operation` + :returns: new instance, with attributes based on the protobuf. + """ + pb_metadata = None + if op_pb.metadata.type_url: + type_url = op_pb.metadata.type_url + md_klass = _TYPE_URL_MAP.get(type_url) + if md_klass: + pb_metadata = md_klass.FromString(op_pb.metadata.value) + return cls(op_pb.name, client, pb_metadata, **kw) + + @property + def complete(self): + """Has the operation already completed? + + :rtype: bool + :returns: True if already completed, else false. + """ + return self._complete + + def poll(self): + """Check if the operation has finished. + + :rtype: bool + :returns: A boolean indicating if the current operation has completed. + :raises: :class:`ValueError ` if the operation + has already completed. + """ + if self.complete: + raise ValueError('The operation has completed.') + + request_pb = operations_pb2.GetOperationRequest(name=self.name) + # We expect a `google.longrunning.operations_pb2.Operation`. + operation_pb = self.client._operations_stub.GetOperation(request_pb) + + if operation_pb.done: + self._complete = True + + return self.complete diff --git a/packages/google-cloud-core/google/cloud/streaming/__init__.py b/packages/google-cloud-core/google/cloud/streaming/__init__.py new file mode 100644 index 000000000000..44e00907cb66 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Vendored-in from google-apitools 0.4.11 + +"""Base ``google.cloud.streaming`` package.""" diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py new file mode 100644 index 000000000000..b9a3d2ff84d1 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py @@ -0,0 +1,104 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Small helper class to provide a small slice of a stream. + +This class reads ahead to detect if we are at the end of the stream. +""" + + +class BufferedStream(object): + """Buffers a stream, reading ahead to determine if we're at the end. 
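+
+ Example (an illustrative sketch)::
+
+ >>> import io
+ >>> buffered = BufferedStream(io.BytesIO(b'abcdefgh'), 0, 4)
+ >>> data = buffered.read(2) # b'ab', served from the buffer only
+ >>> buffered.stream_exhausted
+ False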
+ + :type stream: readable file-like object + :param stream: the stream to be buffered + + :type start: integer + :param start: the starting point in the stream + + :type size: integer + :param size: the size of the buffer + """ + def __init__(self, stream, start, size): + self._stream = stream + self._start_pos = start + self._buffer_pos = 0 + + if not hasattr(self._stream, 'closed') or not self._stream.closed: + self._buffered_data = self._stream.read(size) + else: + self._buffered_data = b'' + + self._stream_at_end = len(self._buffered_data) < size + self._end_pos = self._start_pos + len(self._buffered_data) + + def __repr__(self): + return ('Buffered stream %s from position %s-%s with %s ' + 'bytes remaining' % (self._stream, self._start_pos, + self._end_pos, self._bytes_remaining)) + + def __len__(self): + return len(self._buffered_data) + + @property + def stream_exhausted(self): + """Does the stream have bytes remaining beyond the buffer + + :rtype: boolean + :returns: Boolean indicating if the stream is exhausted. + """ + return self._stream_at_end + + @property + def stream_end_position(self): + """Point to which stream was read into the buffer + + :rtype: integer + :returns: The end-position of the stream. + """ + return self._end_pos + + @property + def _bytes_remaining(self): + """Bytes remaining to be read from the buffer + + :rtype: integer + :returns: The number of bytes remaining. + """ + return len(self._buffered_data) - self._buffer_pos + + def read(self, size=None): + """Read bytes from the buffer. + + :type size: integer or None + :param size: How many bytes to read (defaults to all remaining bytes). + + :rtype: str + :returns: The data read from the stream. + """ + if size is None or size < 0: + raise ValueError( + 'Illegal read of size %s requested on BufferedStream. ' + 'Wrapped stream %s is at position %s-%s, ' + '%s bytes remaining.' % + (size, self._stream, self._start_pos, self._end_pos, + self._bytes_remaining)) + + if not self._bytes_remaining: + return b'' + + size = min(size, self._bytes_remaining) + data = self._buffered_data[self._buffer_pos:self._buffer_pos + size] + self._buffer_pos += size + return data diff --git a/packages/google-cloud-core/google/cloud/streaming/exceptions.py b/packages/google-cloud-core/google/cloud/streaming/exceptions.py new file mode 100644 index 000000000000..0a7e4b94815e --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/exceptions.py @@ -0,0 +1,122 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Exceptions for generated client libraries.""" + + +class Error(Exception): + """Base class for all exceptions.""" + + +class CommunicationError(Error): + """Any communication error talking to an API server.""" + + +class HttpError(CommunicationError): + """Error making a request. Soon to be HttpError. 
+ + :type response: dict + :param response: headers from the response which returned the error + + :type content: bytes + :param content: payload of the response which returned the error + + :type url: string + :param url: URL of the response which returned the error + """ + def __init__(self, response, content, url): + super(HttpError, self).__init__() + self.response = response + self.content = content + self.url = url + + def __str__(self): + content = self.content.decode('ascii', 'replace') + return 'HttpError accessing <%s>: response: <%s>, content <%s>' % ( + self.url, self.response, content) + + @property + def status_code(self): + """Status code for the response. + + :rtype: integer + :returns: the code + """ + return int(self.response['status']) + + @classmethod + def from_response(cls, http_response): + """Factory: construct an exception from a response. + + :type http_response: :class:`~.http_wrapper.Response` + :param http_response: the response which returned the error + + :rtype: :class:`HttpError` + :returns: The error created from the response. + """ + return cls(http_response.info, http_response.content, + http_response.request_url) + + +class TransferError(CommunicationError): + """Errors related to transfers.""" + + +class TransferRetryError(TransferError): + """Retryable errors related to transfers.""" + + +class TransferInvalidError(TransferError): + """The given transfer is invalid.""" + + +class RequestError(CommunicationError): + """The request was not successful.""" + + +class RetryAfterError(HttpError): + """The response contained a retry-after header. + + :type response: dict + :param response: headers from the response which returned the error. + + :type content: bytes + :param content: payload of the response which returned the error. + + :type url: string + :param url: URL of the response which returned the error. + + :type retry_after: integer + :param retry_after: seconds to wait before retrying. + """ + def __init__(self, response, content, url, retry_after): + super(RetryAfterError, self).__init__(response, content, url) + self.retry_after = int(retry_after) + + @classmethod + def from_response(cls, http_response): + """Factory: construct an exception from a response. + + :type http_response: :class:`~.http_wrapper.Response` + :param http_response: the response which returned the error. + + :rtype: :class:`RetryAfterError` + :returns: The error created from the response. + """ + return cls(http_response.info, http_response.content, + http_response.request_url, http_response.retry_after) + + +class BadStatusCodeError(HttpError): + """The request completed but returned a bad status code.""" diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py new file mode 100644 index 000000000000..398c9f2f572f --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -0,0 +1,395 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""HTTP wrapper for apitools. + +This library wraps the underlying http library we use, which is +currently :mod:`httplib2`. +""" + +import collections +import contextlib +import logging +import socket +import time + +import httplib2 +import six +from six.moves import http_client +from six.moves.urllib import parse + +from google.cloud.streaming.exceptions import BadStatusCodeError +from google.cloud.streaming.exceptions import RequestError +from google.cloud.streaming.exceptions import RetryAfterError +from google.cloud.streaming.util import calculate_wait_for_retry + + +_REDIRECTIONS = 5 +# 308 and 429 don't have names in httplib. +RESUME_INCOMPLETE = 308 +TOO_MANY_REQUESTS = 429 + + +_REDIRECT_STATUS_CODES = ( + http_client.MOVED_PERMANENTLY, + http_client.FOUND, + http_client.SEE_OTHER, + http_client.TEMPORARY_REDIRECT, + RESUME_INCOMPLETE, +) + + +_RETRYABLE_EXCEPTIONS = ( + http_client.BadStatusLine, + http_client.IncompleteRead, + http_client.ResponseNotReady, + socket.error, + httplib2.ServerNotFoundError, + ValueError, + RequestError, + BadStatusCodeError, + RetryAfterError, +) + + +@contextlib.contextmanager +def _httplib2_debug_level(http_request, level, http=None): + """Temporarily change the value of httplib2.debuglevel, if necessary. + + If http_request has a `loggable_body` distinct from `body`, then we + need to prevent httplib2 from logging the full body. This sets + httplib2.debuglevel for the duration of the `with` block; however, + that alone won't change the value of existing HTTP connections. If + an httplib2.Http object is provided, we'll also change the level on + any cached connections attached to it. + + :type http_request: :class:`Request` + :param http_request: the request to be logged. + + :type level: integer + :param level: the debuglevel for logging. + + :type http: :class:`httplib2.Http`, or ``None`` + :param http: the instance on whose connections to set the debuglevel. + """ + if http_request.loggable_body is None: + yield + return + old_level = httplib2.debuglevel + http_levels = {} + httplib2.debuglevel = level + if http is not None: + for connection_key, connection in http.connections.items(): + # httplib2 stores two kinds of values in this dict, connection + # classes and instances. Since the connection types are all + # old-style classes, we can't easily distinguish by connection + # type -- so instead we use the key pattern. + if ':' not in connection_key: + continue + http_levels[connection_key] = connection.debuglevel + connection.set_debuglevel(level) + yield + httplib2.debuglevel = old_level + if http is not None: + for connection_key, old_level in http_levels.items(): + http.connections[connection_key].set_debuglevel(old_level) + + +class Request(object): + """Encapsulates the data for an HTTP request. 
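+
+ Example (an illustrative sketch; the URL is a placeholder)::
+
+ >>> request = Request(url='https://example.com/upload',
+ ... http_method='POST', body='{}')
+ >>> request.headers['content-length']
+ '2'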
+ + :type url: str + :param url: the URL for the request + + :type http_method: str + :param http_method: the HTTP method to use for the request + + :type headers: mapping or None + :param headers: headers to be sent with the request + + :type body: str + :param body: body to be sent with the request + """ + def __init__(self, url='', http_method='GET', headers=None, body=''): + self.url = url + self.http_method = http_method + self.headers = headers or {} + self._body = None + self._loggable_body = None + self.body = body + + @property + def loggable_body(self): + """Request body for logging purposes + + :rtype: str + :returns: The body to be logged. + """ + return self._loggable_body + + @loggable_body.setter + def loggable_body(self, value): + """Update request body for logging purposes + + :type value: str + :param value: updated body + + :raises: :exc:`RequestError` if the request does not have a body. + """ + if self.body is None: + raise RequestError( + 'Cannot set loggable body on request with no body') + self._loggable_body = value + + @property + def body(self): + """Request body + + :rtype: str + :returns: The body of the request. + """ + return self._body + + @body.setter + def body(self, value): + """Update the request body + + Handles logging and length measurement. + + :type value: str + :param value: updated body + """ + self._body = value + if value is not None: + # Avoid calling len() which cannot exceed 4GiB in 32-bit python. + body_length = getattr( + self._body, 'length', None) or len(self._body) + self.headers['content-length'] = str(body_length) + else: + self.headers.pop('content-length', None) + # This line ensures we don't try to print large requests. + if not isinstance(value, (type(None), six.string_types)): + self.loggable_body = '' + + +def _process_content_range(content_range): + """Convert a 'Content-Range' header into a length for the response. + + Helper for :meth:`Response.length`. + + :type content_range: str + :param content_range: the header value being parsed. + + :rtype: integer + :returns: the length of the response chunk. + """ + _, _, range_spec = content_range.partition(' ') + byte_range, _, _ = range_spec.partition('/') + start, _, end = byte_range.partition('-') + return int(end) - int(start) + 1 + + +# Note: currently the order of fields here is important, since we want +# to be able to pass in the result from httplib2.request. +_ResponseTuple = collections.namedtuple( + 'HttpResponse', ['info', 'content', 'request_url']) + + +class Response(_ResponseTuple): + """Encapsulates data for an HTTP response. + """ + __slots__ = () + + def __len__(self): + return self.length + + @property + def length(self): + """Length of this response. + + Exposed as an attribute since using ``len()`` directly can fail + for responses larger than ``sys.maxint``. + + :rtype: integer or long + :returns: The length of the response. + """ + if 'content-encoding' in self.info and 'content-range' in self.info: + # httplib2 rewrites content-length in the case of a compressed + # transfer; we can't trust the content-length header in that + # case, but we *can* trust content-range, if it's present. 
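+ # For example, 'Content-Range: bytes 0-1048575/4194304' yields a
+ # chunk length of 1048576 here, regardless of 'content-length'.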
+ return _process_content_range(self.info['content-range']) + elif 'content-length' in self.info: + return int(self.info.get('content-length')) + elif 'content-range' in self.info: + return _process_content_range(self.info['content-range']) + return len(self.content) + + @property + def status_code(self): + """HTTP status code + + :rtype: integer + :returns: The response status code. + """ + return int(self.info['status']) + + @property + def retry_after(self): + """Retry interval (if set). + + :rtype: integer + :returns: interval in seconds + """ + if 'retry-after' in self.info: + return int(self.info['retry-after']) + + @property + def is_redirect(self): + """Does this response contain a redirect + + :rtype: boolean + :returns: True if the status code indicates a redirect and the + 'location' header is present. + """ + return (self.status_code in _REDIRECT_STATUS_CODES and + 'location' in self.info) + + +def _check_response(response): + """Validate a response + + :type response: :class:`Response` + :param response: the response to validate + + :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if response + is None, :exc:`~.exceptions.BadStatusCodeError` if response status + code indicates an error, or :exc:`~.exceptions.RetryAfterError` + if response indicates a retry interval. + """ + if response is None: + # Caller shouldn't call us if the response is None, but handle anyway. + raise RequestError( + 'Request did not return a response.') + elif (response.status_code >= 500 or + response.status_code == TOO_MANY_REQUESTS): + raise BadStatusCodeError.from_response(response) + elif response.retry_after: + raise RetryAfterError.from_response(response) + + +def _reset_http_connections(http): + """Rebuild all http connections in the httplib2.Http instance. + + httplib2 overloads the map in http.connections to contain two different + types of values: + { scheme string: connection class } and + { scheme + authority string : actual http connection } + Here we remove all of the entries for actual connections so that on the + next request httplib2 will rebuild them from the connection types. + + :type http: :class:`httplib2.Http` + :param http: the instance whose connections are to be rebuilt + """ + if getattr(http, 'connections', None): + for conn_key in list(http.connections.keys()): + if ':' in conn_key: + del http.connections[conn_key] + + +def _make_api_request_no_retry(http, http_request, redirections=_REDIRECTIONS): + """Send an HTTP request via the given http instance. + + This wrapper exists to handle translation between the plain httplib2 + request/response types and the Request and Response types above. + + :type http: :class:`httplib2.Http` + :param http: an instance which impelements the `Http` API. + + :type http_request: :class:`Request` + :param http_request: the request to send. + + :type redirections: integer + :param redirections: Number of redirects to follow. + + :rtype: :class:`Response` + :returns: an object representing the server's response + + :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no + response could be parsed. + """ + connection_type = None + # Handle overrides for connection types. This is used if the caller + # wants control over the underlying connection for managing callbacks + # or hash digestion. 
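+ # Scheme-only keys (e.g. 'https') hold connection classes, while
+ # keys containing ':' hold live connections, so this lookup only
+ # ever picks up a caller-installed override class.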
+ if getattr(http, 'connections', None): + url_scheme = parse.urlsplit(http_request.url).scheme + if url_scheme and url_scheme in http.connections: + connection_type = http.connections[url_scheme] + + # Custom printing only at debuglevel 4 + new_debuglevel = 4 if httplib2.debuglevel == 4 else 0 + with _httplib2_debug_level(http_request, new_debuglevel, http=http): + info, content = http.request( + str(http_request.url), method=str(http_request.http_method), + body=http_request.body, headers=http_request.headers, + redirections=redirections, connection_type=connection_type) + + if info is None: + raise RequestError() + + response = Response(info, content, http_request.url) + _check_response(response) + return response + + +def make_api_request(http, http_request, retries=7, + redirections=_REDIRECTIONS): + """Send an HTTP request via the given http, performing error/retry handling. + + :type http: :class:`httplib2.Http` + :param http: an instance which implements the `Http` API. + + :type http_request: :class:`Request` + :param http_request: the request to send. + + :type retries: integer + :param retries: Number of retries to attempt on retryable + responses (such as 429 or 5XX). + + :type redirections: integer + :param redirections: Number of redirects to follow. + + :rtype: :class:`Response` + :returns: an object representing the server's response. + + :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no + response could be parsed. + """ + retry = 0 + while True: + try: + return _make_api_request_no_retry(http, http_request, + redirections=redirections) + except _RETRYABLE_EXCEPTIONS as exc: + retry += 1 + if retry >= retries: + raise + retry_after = getattr(exc, 'retry_after', None) + if retry_after is None: + retry_after = calculate_wait_for_retry(retry) + + _reset_http_connections(http) + logging.debug('Retrying request to url %s after exception %s', + http_request.url, type(exc).__name__) + time.sleep(retry_after) diff --git a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py new file mode 100644 index 000000000000..dc2c3229d6f9 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py @@ -0,0 +1,85 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Small helper class to provide a small slice of a stream.""" + +from six.moves import http_client + + +class StreamSlice(object): + """Provides a slice-like object for streams. + + :type stream: readable file-like object + :param stream: the stream to be buffered. + + :type max_bytes: integer + :param max_bytes: maximum number of bytes to return in the slice. 
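+
+ Example (an illustrative sketch)::
+
+ >>> import io
+ >>> chunk = StreamSlice(io.BytesIO(b'0123456789'), 4)
+ >>> data = chunk.read() # at most 4 bytes: b'0123'
+ >>> len(chunk)
+ 4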
+ """ + def __init__(self, stream, max_bytes): + self._stream = stream + self._remaining_bytes = max_bytes + self._max_bytes = max_bytes + + def __repr__(self): + return 'Slice of stream %s with %s/%s bytes not yet read' % ( + self._stream, self._remaining_bytes, self._max_bytes) + + def __len__(self): + return self._max_bytes + + def __nonzero__(self): + # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid + # accidental len() calls from httplib in the form of "if this_object:". + return bool(self._max_bytes) + + @property + def length(self): + """Maximum number of bytes to return in the slice. + + .. note:: + + For 32-bit python2.x, len() cannot exceed a 32-bit number. + + :rtype: integer + :returns: The max "length" of the stream. + """ + return self._max_bytes + + def read(self, size=None): + """Read bytes from the slice. + + Compared to other streams, there is one case where we may + unexpectedly raise an exception on read: if the underlying stream + is exhausted (i.e. returns no bytes on read), and the size of this + slice indicates we should still be able to read more bytes, we + raise :exc:`IncompleteRead`. + + :type size: integer or None + :param size: If provided, read no more than size bytes from the stream. + + :rtype: bytes + :returns: bytes read from this slice. + + :raises: :exc:`IncompleteRead` + """ + if size is not None: + read_size = min(size, self._remaining_bytes) + else: + read_size = self._remaining_bytes + data = self._stream.read(read_size) + if read_size > 0 and not data: + raise http_client.IncompleteRead( + self._max_bytes - self._remaining_bytes, self._max_bytes) + self._remaining_bytes -= len(data) + return data diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py new file mode 100644 index 000000000000..196a388f3237 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -0,0 +1,1214 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# pylint: disable=too-many-lines + +"""Upload and download support for apitools.""" + +import email.generator as email_generator +import email.mime.multipart as mime_multipart +import email.mime.nonmultipart as mime_nonmultipart +import mimetypes +import os + +import httplib2 +import six +from six.moves import http_client + +from google.cloud._helpers import _to_bytes +from google.cloud.streaming.buffered_stream import BufferedStream +from google.cloud.streaming.exceptions import CommunicationError +from google.cloud.streaming.exceptions import HttpError +from google.cloud.streaming.exceptions import TransferInvalidError +from google.cloud.streaming.exceptions import TransferRetryError +from google.cloud.streaming.http_wrapper import make_api_request +from google.cloud.streaming.http_wrapper import Request +from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE +from google.cloud.streaming.stream_slice import StreamSlice +from google.cloud.streaming.util import acceptable_mime_type + + +RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 +SIMPLE_UPLOAD = 'simple' +RESUMABLE_UPLOAD = 'resumable' + + +_DEFAULT_CHUNKSIZE = 1 << 20 + + +class _Transfer(object): + """Generic bits common to Uploads and Downloads. + + :type stream: file-like object + :param stream: stream to/from which data is downloaded/uploaded. + + :type close_stream: boolean + :param close_stream: should this instance close the stream when deleted + + :type chunksize: integer + :param chunksize: the size of chunks used to download/upload a file. + + :type auto_transfer: boolean + :param auto_transfer: should this instance automatically begin transfering + data when initialized + + :type http: :class:`httplib2.Http` (or workalike) + :param http: Http instance used to perform requests. + + :type num_retries: integer + :param num_retries: how many retries should the transfer attempt + """ + + _num_retries = None + + def __init__(self, stream, close_stream=False, + chunksize=_DEFAULT_CHUNKSIZE, auto_transfer=True, + http=None, num_retries=5): + self._bytes_http = None + self._close_stream = close_stream + self._http = http + self._stream = stream + self._url = None + + # Let the @property do validation. + self.num_retries = num_retries + + self.auto_transfer = auto_transfer + self.chunksize = chunksize + + def __repr__(self): + return str(self) + + @property + def close_stream(self): + """Should this instance close the stream when deleted. + + :rtype: boolean + :returns: Boolean indicated if the stream should be closed. + """ + return self._close_stream + + @property + def http(self): + """Http instance used to perform requests. + + :rtype: :class:`httplib2.Http` (or workalike) + :returns: The HTTP object used for requests. + """ + return self._http + + @property + def bytes_http(self): + """Http instance used to perform binary requests. + + Defaults to :attr:`http`. + + :rtype: :class:`httplib2.Http` (or workalike) + :returns: The HTTP object used for binary requests. + """ + return self._bytes_http or self.http + + @bytes_http.setter + def bytes_http(self, value): + """Update Http instance used to perform binary requests. + + :type value: :class:`httplib2.Http` (or workalike) + :param value: new instance + """ + self._bytes_http = value + + @property + def num_retries(self): + """How many retries should the transfer attempt + + :rtype: integer + :returns: The number of retries allowed. 
+ """ + return self._num_retries + + @num_retries.setter + def num_retries(self, value): + """Update how many retries should the transfer attempt + + :type value: integer + """ + if not isinstance(value, six.integer_types): + raise ValueError("num_retries: pass an integer") + + if value < 0: + raise ValueError( + 'Cannot have negative value for num_retries') + self._num_retries = value + + @property + def stream(self): + """Stream to/from which data is downloaded/uploaded. + + :rtype: file-like object + :returns: The stream that sends/receives data. + """ + return self._stream + + @property + def url(self): + """URL to / from which data is downloaded/uploaded. + + :rtype: string + :returns: The URL where data is sent/received. + """ + return self._url + + def _initialize(self, http, url): + """Initialize this download by setting :attr:`http` and :attr`url`. + + Allow the user to be able to pre-initialize :attr:`http` by setting + the value in the constructor; in that case, we ignore the provided + http. + + :type http: :class:`httplib2.Http` (or a worklike) or None. + :param http: the Http instance to use to make requests. + + :type url: string + :param url: The url for this transfer. + """ + self._ensure_uninitialized() + if self.http is None: + self._http = http or httplib2.Http() + self._url = url + + @property + def initialized(self): + """Has the instance been initialized + + :rtype: boolean + :returns: Boolean indicating if the current transfer + has been initialized. + """ + return self.url is not None and self.http is not None + + def _ensure_initialized(self): + """Helper: assert that the instance is initialized. + + :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` + if the instance is not initialized. + """ + if not self.initialized: + raise TransferInvalidError( + 'Cannot use uninitialized %s', type(self).__name__) + + def _ensure_uninitialized(self): + """Helper: assert that the instance is not initialized. + + :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` + if the instance is already initialized. + """ + if self.initialized: + raise TransferInvalidError( + 'Cannot re-initialize %s', type(self).__name__) + + def __del__(self): + if self._close_stream: + self._stream.close() + + +class Download(_Transfer): + """Represent a single download. + + :type stream: file-like object + :param stream: stream to/from which data is downloaded/uploaded. + + :type kwds: dict + :param kwds: keyword arguments: all except ``total_size`` are passed + through to :meth:`_Transfer.__init__()`. + """ + _ACCEPTABLE_STATUSES = set(( + http_client.OK, + http_client.NO_CONTENT, + http_client.PARTIAL_CONTENT, + http_client.REQUESTED_RANGE_NOT_SATISFIABLE, + )) + + def __init__(self, stream, **kwds): + total_size = kwds.pop('total_size', None) + super(Download, self).__init__(stream, **kwds) + self._initial_response = None + self._progress = 0 + self._total_size = total_size + self._encoding = None + + @classmethod + def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): + """Create a new download object from a filename. + + :type filename: string + :param filename: path/filename for the target file + + :type overwrite: boolean + :param overwrite: should an existing file be overwritten + + :type auto_transfer: boolean + :param auto_transfer: should the transfer be started immediately + + :type kwds: dict + :param kwds: keyword arguments: passed + through to :meth:`_Transfer.__init__()`. 
+ + :rtype: :class:`Download` + :returns: The download initiated from the file passed. + """ + path = os.path.expanduser(filename) + if os.path.exists(path) and not overwrite: + raise ValueError( + 'File %s exists and overwrite not specified' % path) + return cls(open(path, 'wb'), close_stream=True, + auto_transfer=auto_transfer, **kwds) + + @classmethod + def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): + """Create a new Download object from a stream. + + :type stream: writable file-like object + :param stream: the target file + + :type total_size: integer or None + :param total_size: total size of the file to be downloaded + + :type auto_transfer: boolean + :param auto_transfer: should the transfer be started immediately + + :type kwds: dict + :param kwds: keyword arguments: passed + through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Download` + :returns: The download initiated from the stream passed. + """ + return cls(stream, auto_transfer=auto_transfer, total_size=total_size, + **kwds) + + @property + def progress(self): + """Number of bytes have been downloaded. + + :rtype: integer >= 0 + :returns: The number of downloaded bytes. + """ + return self._progress + + @property + def total_size(self): + """Total number of bytes to be downloaded. + + :rtype: integer or None + :returns: The total number of bytes to download. + """ + return self._total_size + + @property + def encoding(self): + """'Content-Encoding' used to transfer the file + + :rtype: string or None + :returns: The encoding of the downloaded content. + """ + return self._encoding + + def __repr__(self): + if not self.initialized: + return 'Download (uninitialized)' + else: + return 'Download with %d/%s bytes transferred from url %s' % ( + self.progress, self.total_size, self.url) + + def configure_request(self, http_request, url_builder): + """Update http_request/url_builder with download-appropriate values. + + :type http_request: :class:`~.streaming.http_wrapper.Request` + :param http_request: the request to be updated + + :type url_builder: instance with settable 'query_params' attribute. + :param url_builder: transfer policy object to be updated + """ + url_builder.query_params['alt'] = 'media' + http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,) + + def _set_total(self, info): + """Update 'total_size' based on data from a response. + + :type info: mapping + :param info: response headers + """ + if 'content-range' in info: + _, _, total = info['content-range'].rpartition('/') + if total != '*': + self._total_size = int(total) + # Note "total_size is None" means we don't know it; if no size + # info was returned on our initial range request, that means we + # have a 0-byte file. (That last statement has been verified + # empirically, but is not clearly documented anywhere.) + if self.total_size is None: + self._total_size = 0 + + def initialize_download(self, http_request, http): + """Initialize this download. + + If the instance has :attr:`auto_transfer` enabled, begins the + download immediately. + + :type http_request: :class:`~.streaming.http_wrapper.Request` + :param http_request: the request to use to initialize this download. + + :type http: :class:`httplib2.Http` (or workalike) + :param http: Http instance for this request. 
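+
+ Example (an illustrative sketch; ``request`` is a previously-built
+ :class:`~.streaming.http_wrapper.Request` for the media URL,
+ ``http`` is an ``httplib2.Http`` instance, and the target path is
+ a placeholder)::
+
+ >>> download = Download.from_file('/tmp/target.dat', overwrite=True)
+ >>> download.initialize_download(request, http)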
+ """ + self._ensure_uninitialized() + url = http_request.url + if self.auto_transfer: + end_byte = self._compute_end_byte(0) + self._set_range_header(http_request, 0, end_byte) + response = make_api_request( + self.bytes_http or http, http_request) + if response.status_code not in self._ACCEPTABLE_STATUSES: + raise HttpError.from_response(response) + self._initial_response = response + self._set_total(response.info) + url = response.info.get('content-location', response.request_url) + self._initialize(http, url) + # Unless the user has requested otherwise, we want to just + # go ahead and pump the bytes now. + if self.auto_transfer: + self.stream_file(use_chunks=True) + + def _normalize_start_end(self, start, end=None): + """Validate / fix up byte range. + + :type start: integer + :param start: start byte of the range: if negative, used as an + offset from the end. + + :type end: integer + :param end: end byte of the range. + + :rtype: tuple, (start, end) + :returns: the normalized start, end pair. + :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` + for invalid combinations of start, end. + """ + if end is not None: + if start < 0: + raise TransferInvalidError( + 'Cannot have end index with negative start index') + elif start >= self.total_size: + raise TransferInvalidError( + 'Cannot have start index greater than total size') + end = min(end, self.total_size - 1) + if end < start: + raise TransferInvalidError( + 'Range requested with end[%s] < start[%s]' % (end, start)) + return start, end + else: + if start < 0: + start = max(0, start + self.total_size) + return start, self.total_size - 1 + + @staticmethod + def _set_range_header(request, start, end=None): + """Update the 'Range' header in a request to match a byte range. + + :type request: :class:`google.cloud.streaming.http_wrapper.Request` + :param request: the request to update + + :type start: integer + :param start: start byte of the range: if negative, used as an + offset from the end. + + :type end: integer + :param end: end byte of the range. + """ + if start < 0: + request.headers['range'] = 'bytes=%d' % start + elif end is None: + request.headers['range'] = 'bytes=%d-' % start + else: + request.headers['range'] = 'bytes=%d-%d' % (start, end) + + def _compute_end_byte(self, start, end=None, use_chunks=True): + """Compute the last byte to fetch for this request. + + Based on the HTTP spec for Range and Content-Range. + + .. note:: + This is potentially confusing in several ways: + - the value for the last byte is 0-based, eg "fetch 10 bytes + from the beginning" would return 9 here. + - if we have no information about size, and don't want to + use the chunksize, we'll return None. + + :type start: integer + :param start: start byte of the range. + + :type end: integer or None + :param end: suggested last byte of the range. + + :type use_chunks: boolean + :param use_chunks: If False, ignore :attr:`chunksize`. + + :rtype: str + :returns: Last byte to use in a 'Range' header, or None. + """ + end_byte = end + + if start < 0 and not self.total_size: + return end_byte + + if use_chunks: + alternate = start + self.chunksize - 1 + if end_byte is not None: + end_byte = min(end_byte, alternate) + else: + end_byte = alternate + + if self.total_size: + alternate = self.total_size - 1 + if end_byte is not None: + end_byte = min(end_byte, alternate) + else: + end_byte = alternate + + return end_byte + + def _get_chunk(self, start, end): + """Retrieve a chunk of the file. 
+ + :type start: integer + :param start: start byte of the range. + + :type end: integer or None + :param end: end byte of the range. + + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` + :returns: response from the chunk request. + """ + self._ensure_initialized() + request = Request(url=self.url) + self._set_range_header(request, start, end=end) + return make_api_request( + self.bytes_http, request, retries=self.num_retries) + + def _process_response(self, response): + """Update attribtes and writing stream, based on response. + + :type response: :class:`google.cloud.streaming.http_wrapper.Response` + :param response: response from a download request. + + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` + :returns: the response + :raises: :exc:`google.cloud.streaming.exceptions.HttpError` for + missing / unauthorized responses; + :exc:`google.cloud.streaming.exceptions.TransferRetryError` + for other error responses. + """ + if response.status_code not in self._ACCEPTABLE_STATUSES: + # We distinguish errors that mean we made a mistake in setting + # up the transfer versus something we should attempt again. + if response.status_code in (http_client.FORBIDDEN, + http_client.NOT_FOUND): + raise HttpError.from_response(response) + else: + raise TransferRetryError(response.content) + if response.status_code in (http_client.OK, + http_client.PARTIAL_CONTENT): + self.stream.write(response.content) + self._progress += response.length + if response.info and 'content-encoding' in response.info: + self._encoding = response.info['content-encoding'] + elif response.status_code == http_client.NO_CONTENT: + # It's important to write something to the stream for the case + # of a 0-byte download to a file, as otherwise python won't + # create the file. + self.stream.write('') + return response + + def get_range(self, start, end=None, use_chunks=True): + """Retrieve a given byte range from this download, inclusive. + + Writes retrieved bytes into :attr:`stream`. + + Range must be of one of these three forms: + * 0 <= start, end = None: Fetch from start to the end of the file. + * 0 <= start <= end: Fetch the bytes from start to end. + * start < 0, end = None: Fetch the last -start bytes of the file. + + (These variations correspond to those described in the HTTP 1.1 + protocol for range headers in RFC 2616, sec. 14.35.1.) + + :type start: integer + :param start: Where to start fetching bytes. (See above.) + + :type end: integer or ``None`` + :param end: Where to stop fetching bytes. (See above.) + + :type use_chunks: boolean + :param use_chunks: If False, ignore :attr:`chunksize` + and fetch this range in a single request. + If True, streams via chunks. + + :raises: :exc:`google.cloud.streaming.exceptions.TransferRetryError` + if a request returns an empty response. 
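+
+ Example (an illustrative sketch; assumes an already-initialized
+ download)::
+
+ >>> download.get_range(0, 1023) # first KiB of the file
+ >>> download.get_range(-256) # final 256 bytes of the file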
+ """ + self._ensure_initialized() + progress_end_normalized = False + if self.total_size is not None: + progress, end_byte = self._normalize_start_end(start, end) + progress_end_normalized = True + else: + progress = start + end_byte = end + while (not progress_end_normalized or end_byte is None or + progress <= end_byte): + end_byte = self._compute_end_byte(progress, end=end_byte, + use_chunks=use_chunks) + response = self._get_chunk(progress, end_byte) + if not progress_end_normalized: + self._set_total(response.info) + progress, end_byte = self._normalize_start_end(start, end) + progress_end_normalized = True + response = self._process_response(response) + progress += response.length + if response.length == 0: + raise TransferRetryError( + 'Zero bytes unexpectedly returned in download response') + + def stream_file(self, use_chunks=True): + """Stream the entire download. + + Writes retrieved bytes into :attr:`stream`. + + :type use_chunks: boolean + :param use_chunks: If False, ignore :attr:`chunksize` + and stream this download in a single request. + If True, streams via chunks. + """ + self._ensure_initialized() + while True: + if self._initial_response is not None: + response = self._initial_response + self._initial_response = None + else: + end_byte = self._compute_end_byte(self.progress, + use_chunks=use_chunks) + response = self._get_chunk(self.progress, end_byte) + if self.total_size is None: + self._set_total(response.info) + response = self._process_response(response) + if (response.status_code == http_client.OK or + self.progress >= self.total_size): + break + + +class Upload(_Transfer): + """Represent a single Upload. + + :type stream: file-like object + :param stream: stream to/from which data is downloaded/uploaded. + + :type mime_type: string: + :param mime_type: MIME type of the upload. + + :type total_size: integer or None + :param total_size: Total upload size for the stream. + + :type http: :class:`httplib2.Http` (or workalike) + :param http: Http instance used to perform requests. + + :type close_stream: boolean + :param close_stream: should this instance close the stream when deleted + + :type auto_transfer: boolean + :param auto_transfer: should this instance automatically begin transfering + data when initialized + + :type kwds: dict + :param kwds: keyword arguments: all except ``total_size`` are passed + through to :meth:`_Transfer.__init__()`. + """ + _REQUIRED_SERIALIZATION_KEYS = set(( + 'auto_transfer', 'mime_type', 'total_size', 'url')) + + def __init__(self, stream, mime_type, total_size=None, http=None, + close_stream=False, auto_transfer=True, + **kwds): + super(Upload, self).__init__( + stream, close_stream=close_stream, auto_transfer=auto_transfer, + http=http, **kwds) + self._final_response = None + self._server_chunk_granularity = None + self._complete = False + self._mime_type = mime_type + self._progress = 0 + self._strategy = None + self._total_size = total_size + + @classmethod + def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): + """Create a new Upload object from a filename. + + :type filename: string + :param filename: path/filename to the file being uploaded + + :type mime_type: string + :param mime_type: MIMEtype of the file being uploaded + + :type auto_transfer: boolean or None + :param auto_transfer: should the transfer be started immediately + + :type kwds: dict + :param kwds: keyword arguments: passed + through to :meth:`_Transfer.__init__()`. 
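Taken together, `get_range` and `_compute_end_byte` above turn one requested range into a series of chunk-sized 'Range' requests. A rough sketch of the chunk boundaries produced for a full-file download (the helper name and the 1 MiB chunk size are illustrative, not the class defaults)::

    def chunk_ranges(total_size, chunksize=1024 * 1024):
        # Successive inclusive (start, end) pairs, each at most one chunk.
        start = 0
        while start < total_size:
            end = min(start + chunksize - 1, total_size - 1)
            yield (start, end)
            start = end + 1

    # A 2.5 MiB object is fetched as three requests.
    assert list(chunk_ranges(5 * 512 * 1024)) == [
        (0, 1048575), (1048576, 2097151), (2097152, 2621439)]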
+ + :rtype: :class:`Upload` + :returns: The upload initiated from the file passed. + """ + path = os.path.expanduser(filename) + if not mime_type: + mime_type, _ = mimetypes.guess_type(path) + if mime_type is None: + raise ValueError( + 'Could not guess mime type for %s' % path) + size = os.stat(path).st_size + return cls(open(path, 'rb'), mime_type, total_size=size, + close_stream=True, auto_transfer=auto_transfer, **kwds) + + @classmethod + def from_stream(cls, stream, mime_type, + total_size=None, auto_transfer=True, **kwds): + """Create a new Upload object from a stream. + + :type stream: writable file-like object + :param stream: the target file + + :type mime_type: string + :param mime_type: MIMEtype of the file being uploaded + + :type total_size: integer or None + :param total_size: Size of the file being uploaded + + :type auto_transfer: boolean or None + :param auto_transfer: should the transfer be started immediately + + :type kwds: dict + :param kwds: keyword arguments: passed + through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Upload` + :returns: The upload initiated from the stream passed. + """ + if mime_type is None: + raise ValueError( + 'No mime_type specified for stream') + return cls(stream, mime_type, total_size=total_size, + close_stream=False, auto_transfer=auto_transfer, **kwds) + + @property + def complete(self): + """Has the entire stream been uploaded. + + :rtype: boolean + :returns: Boolean indicated if the upload is complete. + """ + return self._complete + + @property + def mime_type(self): + """MIMEtype of the file being uploaded. + + :rtype: string + :returns: The mime-type of the upload. + """ + return self._mime_type + + @property + def progress(self): + """Bytes uploaded so far + + :rtype: integer + :returns: The amount uploaded so far. + """ + return self._progress + + @property + def strategy(self): + """Upload strategy to use + + :rtype: string or None + :returns: The strategy used to upload the data. + """ + return self._strategy + + @strategy.setter + def strategy(self, value): + """Update upload strategy to use + + :type value: string (one of :data:`SIMPLE_UPLOAD` or + :data:`RESUMABLE_UPLOAD`) + + :raises: :exc:`ValueError` if value is not one of the two allowed + strings. + """ + if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD): + raise ValueError(( + 'Invalid value "%s" for upload strategy, must be one of ' + '"simple" or "resumable".') % value) + self._strategy = value + + @property + def total_size(self): + """Total size of the stream to be uploaded. + + :rtype: integer or None + :returns: The total size to be uploaded. + """ + return self._total_size + + @total_size.setter + def total_size(self, value): + """Update total size of the stream to be uploaded. + + :type value: integer or None + :param value: the size + """ + self._ensure_uninitialized() + self._total_size = value + + def __repr__(self): + if not self.initialized: + return 'Upload (uninitialized)' + else: + return 'Upload with %d/%s bytes transferred for url %s' % ( + self.progress, self.total_size or '???', self.url) + + def _set_default_strategy(self, upload_config, http_request): + """Determine and set the default upload strategy for this upload. + + We generally prefer simple or multipart, unless we're forced to + use resumable. This happens when any of (1) the upload is too + large, (2) the simple endpoint doesn't support multipart requests + and we have metadata, or (3) there is no simple upload endpoint. 
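The two factories above are the intended entry points: `from_file` stats the file and guesses its MIME type, while `from_stream` requires an explicit type. A hypothetical construction, assuming the package is installed and using made-up payload data::

    import io

    from google.cloud.streaming.transfer import RESUMABLE_UPLOAD, Upload

    payload = io.BytesIO(b'column_a,column_b\n1,2\n')
    upload = Upload.from_stream(
        payload, 'text/csv', total_size=len(payload.getvalue()),
        auto_transfer=False)
    upload.strategy = RESUMABLE_UPLOAD
    print(upload)   # "Upload (uninitialized)" until a session URL is set
    # Upload.from_file('path/to/local.csv') behaves the same way, filling in
    # total_size and the guessed MIME type from the file itself.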
+ + :type upload_config: instance w/ ``max_size`` and ``accept`` + attributes + :param upload_config: Configuration for the upload endpoint. + + :type http_request: :class:`~.streaming.http_wrapper.Request` + :param http_request: The associated http request. + """ + if upload_config.resumable_path is None: + self.strategy = SIMPLE_UPLOAD + if self.strategy is not None: + return + strategy = SIMPLE_UPLOAD + if (self.total_size is not None and + self.total_size > RESUMABLE_UPLOAD_THRESHOLD): + strategy = RESUMABLE_UPLOAD + if http_request.body and not upload_config.simple_multipart: + strategy = RESUMABLE_UPLOAD + if not upload_config.simple_path: + strategy = RESUMABLE_UPLOAD + self.strategy = strategy + + def configure_request(self, upload_config, http_request, url_builder): + """Configure the request and url for this upload. + + :type upload_config: instance w/ ``max_size`` and ``accept`` + attributes + :param upload_config: transfer policy object to be queried + + :type http_request: :class:`~.streaming.http_wrapper.Request` + :param http_request: the request to be updated + + :type url_builder: instance with settable 'relative_path' and + 'query_params' attributes. + :param url_builder: transfer policy object to be updated + + :raises: :exc:`ValueError` if the requested upload is too big, + or does not have an acceptable MIME type. + """ + # Validate total_size vs. max_size + if (self.total_size and upload_config.max_size and + self.total_size > upload_config.max_size): + raise ValueError( + 'Upload too big: %s larger than max size %s' % ( + self.total_size, upload_config.max_size)) + # Validate mime type + if not acceptable_mime_type(upload_config.accept, self.mime_type): + raise ValueError( + 'MIME type %s does not match any accepted MIME ranges %s' % ( + self.mime_type, upload_config.accept)) + + self._set_default_strategy(upload_config, http_request) + if self.strategy == SIMPLE_UPLOAD: + url_builder.relative_path = upload_config.simple_path + if http_request.body: + url_builder.query_params['uploadType'] = 'multipart' + self._configure_multipart_request(http_request) + else: + url_builder.query_params['uploadType'] = 'media' + self._configure_media_request(http_request) + else: + url_builder.relative_path = upload_config.resumable_path + url_builder.query_params['uploadType'] = 'resumable' + self._configure_resumable_request(http_request) + + def _configure_media_request(self, http_request): + """Helper for 'configure_request': set up simple request.""" + http_request.headers['content-type'] = self.mime_type + http_request.body = self.stream.read() + http_request.loggable_body = '' + + def _configure_multipart_request(self, http_request): + """Helper for 'configure_request': set up multipart request.""" + # This is a multipart/related upload. 
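The strategy decision spelled out in `_set_default_strategy` above reads as a small rule table: resumable wins whenever the payload is large, metadata is present but multipart is unsupported, or no simple endpoint exists. A condensed restatement, where the 5 MiB threshold is an assumption standing in for `RESUMABLE_UPLOAD_THRESHOLD` and `UploadConfig` is a stand-in for the real config object::

    from collections import namedtuple

    UploadConfig = namedtuple(
        'UploadConfig', ['resumable_path', 'simple_path', 'simple_multipart'])

    def pick_strategy(total_size, has_metadata, config,
                      threshold=5 * 1024 * 1024):
        if config.resumable_path is None:
            return 'simple'                 # no resumable endpoint at all
        if total_size is not None and total_size > threshold:
            return 'resumable'              # too big for a single shot
        if has_metadata and not config.simple_multipart:
            return 'resumable'              # metadata needs multipart support
        if not config.simple_path:
            return 'resumable'              # no simple endpoint
        return 'simple'

    config = UploadConfig('/resumable/upload', '/upload', True)
    assert pick_strategy(1024, True, config) == 'simple'
    assert pick_strategy(50 * 1024 * 1024, False, config) == 'resumable'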
+ msg_root = mime_multipart.MIMEMultipart('related') + # msg_root should not write out its own headers + setattr(msg_root, '_write_headers', lambda self: None) + + # attach the body as one part + msg = mime_nonmultipart.MIMENonMultipart( + *http_request.headers['content-type'].split('/')) + msg.set_payload(http_request.body) + msg_root.attach(msg) + + # attach the media as the second part + msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/')) + msg['Content-Transfer-Encoding'] = 'binary' + msg.set_payload(self.stream.read()) + msg_root.attach(msg) + + # NOTE: generate multipart message as bytes, not text + stream = six.BytesIO() + if six.PY3: # pragma: NO COVER Python3 + generator_class = email_generator.BytesGenerator + else: + generator_class = email_generator.Generator + generator = generator_class(stream, mangle_from_=False) + generator.flatten(msg_root, unixfrom=False) + http_request.body = stream.getvalue() + + multipart_boundary = msg_root.get_boundary() + http_request.headers['content-type'] = ( + 'multipart/related; boundary="%s"' % multipart_boundary) + + boundary_bytes = _to_bytes(multipart_boundary) + body_components = http_request.body.split(boundary_bytes) + headers, _, _ = body_components[-2].partition(b'\n\n') + body_components[-2] = b'\n\n'.join([headers, b'\n\n--']) + http_request.loggable_body = boundary_bytes.join(body_components) + + def _configure_resumable_request(self, http_request): + """Helper for 'configure_request': set up resumable request.""" + http_request.headers['X-Upload-Content-Type'] = self.mime_type + if self.total_size is not None: + http_request.headers[ + 'X-Upload-Content-Length'] = str(self.total_size) + + def refresh_upload_state(self): + """Refresh the state of a resumable upload via query to the back-end. + """ + if self.strategy != RESUMABLE_UPLOAD: + return + self._ensure_initialized() + # NOTE: Per RFC 2616[1]/7231[2], a 'PUT' request is inappropriate + # here: it is intended to be used to replace the entire + # resource, not to query for a status. + # + # If the back-end doesn't provide a way to query for this state + # via a 'GET' request, somebody should be spanked. + # + # The violation is documented[3]. + # + # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6 + # [2] http://tools.ietf.org/html/rfc7231#section-4.3.4 + # [3] + # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload + refresh_request = Request( + url=self.url, http_method='PUT', + headers={'Content-Range': 'bytes */*'}) + refresh_response = make_api_request( + self.http, refresh_request, redirections=0, + retries=self.num_retries) + range_header = self._get_range_header(refresh_response) + if refresh_response.status_code in (http_client.OK, + http_client.CREATED): + self._complete = True + self._progress = self.total_size + self.stream.seek(self.progress) + # If we're finished, the refresh response will contain the metadata + # originally requested. Cache it so it can be returned in + # StreamInChunks. + self._final_response = refresh_response + elif refresh_response.status_code == RESUME_INCOMPLETE: + if range_header is None: + self._progress = 0 + else: + self._progress = self._last_byte(range_header) + 1 + self.stream.seek(self.progress) + else: + raise HttpError.from_response(refresh_response) + + @staticmethod + def _get_range_header(response): + """Return a 'Range' header from a response. 
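For a resumable session, the status check above asks the server how many bytes it has already persisted and resumes from the next one. The bookkeeping reduces to parsing the reported range value; a sketch, where 'bytes=0-524287' is an example of what the backend might report::

    def next_offset(range_header):
        # None means the server has not persisted any bytes yet.
        if range_header is None:
            return 0
        _, _, last_byte = range_header.partition('-')
        return int(last_byte) + 1

    assert next_offset(None) == 0
    assert next_offset('bytes=0-524287') == 524288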
+ + :type response: :class:`google.cloud.streaming.http_wrapper.Response` + :param response: response to be queried + + :rtype: string + :returns: The header used to determine the bytes range. + """ + # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header, + # not a response header. If the back-end is actually setting + # 'Range' on responses, somebody should be spanked: it should + # be sending 'Content-Range' (including the # '/' + # trailer). + # + # The violation is documented[4]. + # + # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html + # [2] http://tools.ietf.org/html/rfc7233#section-3.1 + # [3] http://tools.ietf.org/html/rfc7233#section-4.2 + # [4] + # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#chunking + return response.info.get('Range', response.info.get('range')) + + def initialize_upload(self, http_request, http): + """Initialize this upload from the given http_request. + + :type http_request: :class:`~.streaming.http_wrapper.Request` + :param http_request: the request to be used + + :type http: :class:`httplib2.Http` (or workalike) + :param http: Http instance for this request. + + :raises: :exc:`ValueError` if the instance has not been configured + with a strategy. + :rtype: :class:`~google.cloud.streaming.http_wrapper.Response` + :returns: The response if the upload is resumable and auto transfer + is not used. + """ + if self.strategy is None: + raise ValueError( + 'No upload strategy set; did you call configure_request?') + if self.strategy != RESUMABLE_UPLOAD: + return + self._ensure_uninitialized() + http_response = make_api_request(http, http_request, + retries=self.num_retries) + if http_response.status_code != http_client.OK: + raise HttpError.from_response(http_response) + + granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity') + if granularity is not None: + granularity = int(granularity) + self._server_chunk_granularity = granularity + url = http_response.info['location'] + self._initialize(http, url) + + # Unless the user has requested otherwise, we want to just + # go ahead and pump the bytes now. + if self.auto_transfer: + return self.stream_file(use_chunks=True) + else: + return http_response + + @staticmethod + def _last_byte(range_header): + """Parse the last byte from a 'Range' header. + + :type range_header: string + :param range_header: 'Range' header value per RFC 2616/7233 + + :rtype: int + :returns: The last byte from a range header. + """ + _, _, end = range_header.partition('-') + return int(end) + + def _validate_chunksize(self, chunksize=None): + """Validate chunksize against server-specified granularity. + + Helper for :meth:`stream_file`. + + :type chunksize: integer or None + :param chunksize: the chunk size to be tested. + + :raises: :exc:`ValueError` if ``chunksize`` is not a multiple + of the server-specified granulariy. + """ + if self._server_chunk_granularity is None: + return + chunksize = chunksize or self.chunksize + if chunksize % self._server_chunk_granularity: + raise ValueError( + 'Server requires chunksize to be a multiple of %d', + self._server_chunk_granularity) + + def stream_file(self, use_chunks=True): + """Upload the stream. + + :type use_chunks: boolean + :param use_chunks: If False, send the stream in a single request. + Otherwise, send it in chunks. + + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` + :returns: The response for the final request made. 
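`_validate_chunksize` above rejects chunk sizes that are not multiples of the granularity the server advertises at initialization. A caller that would rather adapt than fail could round down instead; this is an alternative sketch, not what the class itself does::

    def align_chunksize(chunksize, granularity):
        # Round a requested chunksize down to the server's granularity.
        if granularity is None or chunksize % granularity == 0:
            return chunksize
        aligned = (chunksize // granularity) * granularity
        if aligned == 0:
            raise ValueError('chunksize smaller than server granularity')
        return aligned

    assert align_chunksize(1048576, 262144) == 1048576   # already aligned
    assert align_chunksize(1000000, 262144) == 786432    # rounded down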
+ """ + if self.strategy != RESUMABLE_UPLOAD: + raise ValueError( + 'Cannot stream non-resumable upload') + # final_response is set if we resumed an already-completed upload. + response = self._final_response + send_func = self._send_chunk if use_chunks else self._send_media_body + if use_chunks: + self._validate_chunksize(self.chunksize) + self._ensure_initialized() + while not self.complete: + response = send_func(self.stream.tell()) + if response.status_code in (http_client.OK, http_client.CREATED): + self._complete = True + break + self._progress = self._last_byte(response.info['range']) + if self.progress + 1 != self.stream.tell(): + raise CommunicationError( + 'Failed to transfer all bytes in chunk, upload paused at ' + 'byte %d' % self.progress) + if self.complete and hasattr(self.stream, 'seek'): + if not hasattr(self.stream, 'seekable') or self.stream.seekable(): + current_pos = self.stream.tell() + self.stream.seek(0, os.SEEK_END) + end_pos = self.stream.tell() + self.stream.seek(current_pos) + if current_pos != end_pos: + raise TransferInvalidError( + 'Upload complete with %s ' + 'additional bytes left in stream' % + (int(end_pos) - int(current_pos))) + return response + + def _send_media_request(self, request, end): + """Peform API upload request. + + Helper for _send_media_body & _send_chunk: + + :type request: :class:`google.cloud.streaming.http_wrapper.Request` + :param request: the request to upload + + :type end: integer + :param end: end byte of the to be uploaded + + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` + :returns: the response + :raises: :exc:`~.streaming.exceptions.HttpError` if the status + code from the response indicates an error. + """ + response = make_api_request( + self.bytes_http, request, retries=self.num_retries) + if response.status_code not in (http_client.OK, http_client.CREATED, + RESUME_INCOMPLETE): + # We want to reset our state to wherever the server left us + # before this failed request, and then raise. + self.refresh_upload_state() + raise HttpError.from_response(response) + if response.status_code == RESUME_INCOMPLETE: + last_byte = self._last_byte( + self._get_range_header(response)) + if last_byte + 1 != end: + self.stream.seek(last_byte) + return response + + def _send_media_body(self, start): + """Send the entire stream in a single request. + + Helper for :meth:`stream_file`: + + :type start: integer + :param start: start byte of the range. + + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` + :returns: The response from the media upload request. + """ + self._ensure_initialized() + if self.total_size is None: + raise TransferInvalidError( + 'Total size must be known for SendMediaBody') + body_stream = StreamSlice(self.stream, self.total_size - start) + + request = Request(url=self.url, http_method='PUT', body=body_stream) + request.headers['Content-Type'] = self.mime_type + if start == self.total_size: + # End of an upload with 0 bytes left to send; just finalize. + range_string = 'bytes */%s' % self.total_size + else: + range_string = 'bytes %s-%s/%s' % (start, self.total_size - 1, + self.total_size) + + request.headers['Content-Range'] = range_string + + return self._send_media_request(request, self.total_size) + + def _send_chunk(self, start): + """Send a chunk of the stream. + + Helper for :meth:`stream_file`: + + :type start: integer + :param start: start byte of the range. + + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` + :returns: The response from the chunked upload request. 
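`_send_media_body` above and `_send_chunk` just below build the 'Content-Range' header in three shapes: an open-ended range while the total is still unknown, a bare ``bytes */N`` to finalize with nothing left to send, and a fully specified range otherwise. Restated as a single helper for illustration only::

    def content_range(start, end, total_size):
        if total_size is None:
            return 'bytes %d-%d/*' % (start, end - 1)   # total still unknown
        if end == start:
            return 'bytes */%d' % total_size            # finalize, nothing left
        return 'bytes %d-%d/%d' % (start, end - 1, total_size)

    assert content_range(0, 262144, None) == 'bytes 0-262143/*'
    assert content_range(262144, 262144, 262144) == 'bytes */262144'
    assert content_range(0, 100, 1000) == 'bytes 0-99/1000'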
+ """ + self._ensure_initialized() + no_log_body = self.total_size is None + if self.total_size is None: + # For the streaming resumable case, we need to detect when + # we're at the end of the stream. + body_stream = BufferedStream( + self.stream, start, self.chunksize) + end = body_stream.stream_end_position + if body_stream.stream_exhausted: + self._total_size = end + # Here, change body_stream from a stream to a string object, + # which means reading a chunk into memory. This works around + # https://code.google.com/p/httplib2/issues/detail?id=176 which can + # cause httplib2 to skip bytes on 401's for file objects. + body_stream = body_stream.read(self.chunksize) + else: + end = min(start + self.chunksize, self.total_size) + body_stream = StreamSlice(self.stream, end - start) + request = Request(url=self.url, http_method='PUT', body=body_stream) + request.headers['Content-Type'] = self.mime_type + if no_log_body: + # Disable logging of streaming body. + request.loggable_body = '' + if self.total_size is None: + # Streaming resumable upload case, unknown total size. + range_string = 'bytes %s-%s/*' % (start, end - 1) + elif end == start: + # End of an upload with 0 bytes left to send; just finalize. + range_string = 'bytes */%s' % self.total_size + else: + # Normal resumable upload case with known sizes. + range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size) + + request.headers['Content-Range'] = range_string + + return self._send_media_request(request, end) diff --git a/packages/google-cloud-core/google/cloud/streaming/util.py b/packages/google-cloud-core/google/cloud/streaming/util.py new file mode 100644 index 000000000000..c5d1e5b082f3 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/streaming/util.py @@ -0,0 +1,74 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Assorted utilities shared between parts of apitools.""" + +import random + + +_MAX_RETRY_WAIT = 60 + + +def calculate_wait_for_retry(retry_attempt): + """Calculate the amount of time to wait before a retry attempt. + + Wait time grows exponentially with the number of attempts. A + random amount of jitter is added to spread out retry attempts from + different clients. + + :type retry_attempt: integer + :param retry_attempt: Retry attempt counter. + + :rtype: integer + :returns: Number of seconds to wait before retrying request. + """ + wait_time = 2 ** retry_attempt + max_jitter = wait_time / 4.0 + wait_time += random.uniform(-max_jitter, max_jitter) + return max(1, min(wait_time, _MAX_RETRY_WAIT)) + + +def acceptable_mime_type(accept_patterns, mime_type): + """Check that ``mime_type`` matches one of ``accept_patterns``. + + Note that this function assumes that all patterns in accept_patterns + will be simple types of the form "type/subtype", where one or both + of these can be "*". We do not support parameters (i.e. "; q=") in + patterns. + + :type accept_patterns: list of string + :param accept_patterns: acceptable MIME types. 
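`calculate_wait_for_retry` above produces exponential backoff with roughly 25 percent jitter, clamped between 1 and 60 seconds, so concurrent clients do not retry in lockstep. Concretely, assuming the package is importable::

    from google.cloud.streaming.util import calculate_wait_for_retry

    # Attempt 3 waits 2 ** 3 = 8 seconds plus or minus 2 seconds of jitter;
    # attempt 10 would be 1024 seconds but is clamped to the 60-second cap.
    assert 6.0 <= calculate_wait_for_retry(3) <= 10.0
    assert calculate_wait_for_retry(10) == 60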
+ + :type mime_type: string + :param mime_type: the MIME being checked + + :rtype: boolean + :returns: True if the supplied MIME type matches at least one of the + patterns, else False. + """ + if '/' not in mime_type: + raise ValueError( + 'Invalid MIME type: "%s"' % mime_type) + unsupported_patterns = [p for p in accept_patterns if ';' in p] + if unsupported_patterns: + raise ValueError( + 'MIME patterns with parameter unsupported: "%s"' % ', '.join( + unsupported_patterns)) + + def _match(pattern, mime_type): + """Return True iff mime_type is acceptable for pattern.""" + return all(accept in ('*', provided) for accept, provided + in zip(pattern.split('/'), mime_type.split('/'))) + + return any(_match(pattern, mime_type) for pattern in accept_patterns) From 9da705e346df8023d8ca1c5c93522f6ccb20cbb2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 22 Sep 2016 15:55:56 -0700 Subject: [PATCH 002/468] Making core directory a proper package. Also changing the version from 0.19.0 to 0.20.0dev. Done by adding new setup.py, MANIFEST and README to core subdirectory, adding core to the list of packages in verify_included_modules, updating the umbrella setup to depend on core and adding the local core package to the umbrella tox config. --- packages/google-cloud-core/MANIFEST.in | 4 ++ packages/google-cloud-core/README.rst | 20 +++++++ packages/google-cloud-core/setup.py | 72 ++++++++++++++++++++++++++ 3 files changed, 96 insertions(+) create mode 100644 packages/google-cloud-core/MANIFEST.in create mode 100644 packages/google-cloud-core/README.rst create mode 100644 packages/google-cloud-core/setup.py diff --git a/packages/google-cloud-core/MANIFEST.in b/packages/google-cloud-core/MANIFEST.in new file mode 100644 index 000000000000..cb3a2b9ef4fa --- /dev/null +++ b/packages/google-cloud-core/MANIFEST.in @@ -0,0 +1,4 @@ +include README.rst +graft google +graft unit_tests +global-exclude *.pyc diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst new file mode 100644 index 000000000000..d8ec01b781be --- /dev/null +++ b/packages/google-cloud-core/README.rst @@ -0,0 +1,20 @@ +Core Helpers for Google Cloud Python Client Library +=================================================== + +This library is not meant to stand-alone. Instead it defines +common helpers (e.g. base ``Client`` and ``Connection`` classes) +used by all of the ``google-cloud-*``. + + +- `Homepage`_ +- `API Documentation`_ + +.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ +.. _API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ + +Quick Start +----------- + +:: + + $ pip install --upgrade google-cloud-core diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py new file mode 100644 index 000000000000..97b695e1a71e --- /dev/null +++ b/packages/google-cloud-core/setup.py @@ -0,0 +1,72 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
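The wildcard matching in `acceptable_mime_type` above treats each accept pattern as a plain ``type/subtype`` pair in which either side may be ``*``, and rejects patterns carrying parameters. Assuming the package is importable, a few checks make the behaviour concrete::

    from google.cloud.streaming.util import acceptable_mime_type

    assert acceptable_mime_type(['*/*'], 'image/png')
    assert acceptable_mime_type(['image/*', 'text/plain'], 'image/png')
    assert not acceptable_mime_type(['text/*'], 'image/png')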
+ +import os + +from setuptools import find_packages +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + +with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: + README = file_obj.read() + +# NOTE: This is duplicated throughout and we should try to +# consolidate. +SETUP_BASE = { + 'author': 'Google Cloud Platform', + 'author_email': 'jjg+google-cloud-python@google.com', + 'scripts': [], + 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', + 'license': 'Apache 2.0', + 'platforms': 'Posix; MacOS X; Windows', + 'include_package_data': True, + 'zip_safe': False, + 'classifiers': [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Topic :: Internet', + ], +} + + +REQUIREMENTS = [ + 'httplib2 >= 0.9.1', + 'googleapis-common-protos', + 'oauth2client >= 2.0.1, < 3.0.0dev', + 'protobuf >= 3.0.0', + 'six', +] + +setup( + name='google-cloud-core', + version='0.20.0dev', + description='API Client library for Google Cloud: Core Helpers', + long_description=README, + namespace_packages=[ + 'google', + 'google.cloud', + ], + packages=find_packages(), + install_requires=REQUIREMENTS, + **SETUP_BASE +) From 4f0d83661a3d13f5daf6e77353b4d5dfe2be4d72 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 22 Sep 2016 15:57:26 -0700 Subject: [PATCH 003/468] Changing user-agent to use the version from google-cloud-core. See #2391 for larger discussion. --- packages/google-cloud-core/google/cloud/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index c5ed3a627e99..e5893a34630e 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -28,7 +28,7 @@ """The base of the API call URL.""" DEFAULT_USER_AGENT = 'gcloud-python/{0}'.format( - get_distribution('google-cloud').version) + get_distribution('google-cloud-core').version) """The user agent for google-cloud-python requests.""" From b81516c40a7788f1888fec2acc8ad24db4f01871 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 22 Sep 2016 15:58:31 -0700 Subject: [PATCH 004/468] Moving core unit tests into core subdirectory. 
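With ``setup.py`` above declaring ``google`` and ``google.cloud`` as namespace packages, the new ``google-cloud-core`` distribution installs side by side with the other ``google-cloud-*`` packages, and the user agent now reports this package's version. A quick, hypothetical sanity check in an interpreter session::

    import pkg_resources

    from google.cloud import connection

    print(pkg_resources.get_distribution('google-cloud-core').version)
    # the version declared above, 0.20.0dev
    print(connection.DEFAULT_USER_AGENT)
    # 'gcloud-python/<that same version>'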
Done via: $ mkdir -p core/unit_tests/streaming $ cp unit_tests/__init__.py core/unit_tests/__init__.py $ git add core/unit_tests/__init__.py $ cp unit_tests/streaming/__init__.py core/unit_tests/streaming/__init__.py $ git add core/unit_tests/streaming/__init__.py $ git mv unit_tests/test__helpers.py core/unit_tests/test__helpers.py $ git mv unit_tests/test_client.py core/unit_tests/test_client.py $ git mv unit_tests/test_connection.py core/unit_tests/test_connection.py $ git mv unit_tests/test_credentials.py core/unit_tests/test_credentials.py $ git mv unit_tests/test_exceptions.py core/unit_tests/test_exceptions.py $ git mv unit_tests/test_iterator.py core/unit_tests/test_iterator.py $ git mv unit_tests/test_operation.py core/unit_tests/test_operation.py $ git mv unit_tests/streaming/test_buffered_stream.py core/unit_tests/streaming/test_buffered_stream.py $ git mv unit_tests/streaming/test_exceptions.py core/unit_tests/streaming/test_exceptions.py $ git mv unit_tests/streaming/test_http_wrapper.py core/unit_tests/streaming/test_http_wrapper.py $ git mv unit_tests/streaming/test_stream_slice.py core/unit_tests/streaming/test_stream_slice.py $ git mv unit_tests/streaming/test_transfer.py core/unit_tests/streaming/test_transfer.py $ git mv unit_tests/streaming/test_util.py core/unit_tests/streaming/test_util.py --- .../google-cloud-core/unit_tests/__init__.py | 13 + .../unit_tests/streaming/__init__.py | 13 + .../streaming/test_buffered_stream.py | 117 + .../unit_tests/streaming/test_exceptions.py | 101 + .../unit_tests/streaming/test_http_wrapper.py | 482 ++++ .../unit_tests/streaming/test_stream_slice.py | 82 + .../unit_tests/streaming/test_transfer.py | 1956 +++++++++++++++++ .../unit_tests/streaming/test_util.py | 62 + .../unit_tests/test__helpers.py | 1093 +++++++++ .../unit_tests/test_client.py | 228 ++ .../unit_tests/test_connection.py | 390 ++++ .../unit_tests/test_credentials.py | 256 +++ .../unit_tests/test_exceptions.py | 131 ++ .../unit_tests/test_iterator.py | 278 +++ .../unit_tests/test_operation.py | 244 ++ 15 files changed, 5446 insertions(+) create mode 100644 packages/google-cloud-core/unit_tests/__init__.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/__init__.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/test_exceptions.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/test_transfer.py create mode 100644 packages/google-cloud-core/unit_tests/streaming/test_util.py create mode 100644 packages/google-cloud-core/unit_tests/test__helpers.py create mode 100644 packages/google-cloud-core/unit_tests/test_client.py create mode 100644 packages/google-cloud-core/unit_tests/test_connection.py create mode 100644 packages/google-cloud-core/unit_tests/test_credentials.py create mode 100644 packages/google-cloud-core/unit_tests/test_exceptions.py create mode 100644 packages/google-cloud-core/unit_tests/test_iterator.py create mode 100644 packages/google-cloud-core/unit_tests/test_operation.py diff --git a/packages/google-cloud-core/unit_tests/__init__.py b/packages/google-cloud-core/unit_tests/__init__.py new file mode 100644 index 000000000000..58e0d9153632 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 
2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-core/unit_tests/streaming/__init__.py b/packages/google-cloud-core/unit_tests/streaming/__init__.py new file mode 100644 index 000000000000..58e0d9153632 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/streaming/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py new file mode 100644 index 000000000000..3304e2bd3cc0 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py @@ -0,0 +1,117 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +class Test_BufferedStream(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.buffered_stream import BufferedStream + return BufferedStream + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_start_zero_longer_than_buffer(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 0 + BUFSIZE = 4 + stream = BytesIO(CONTENT) + bufstream = self._makeOne(stream, START, BUFSIZE) + self.assertIs(bufstream._stream, stream) + self.assertEqual(bufstream._start_pos, START) + self.assertEqual(bufstream._buffer_pos, 0) + self.assertEqual(bufstream._buffered_data, CONTENT[:BUFSIZE]) + self.assertEqual(len(bufstream), BUFSIZE) + self.assertFalse(bufstream.stream_exhausted) + self.assertEqual(bufstream.stream_end_position, BUFSIZE) + + def test_ctor_start_nonzero_shorter_than_buffer(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 8 + BUFSIZE = 10 + stream = BytesIO(CONTENT) + stream.read(START) # already consumed + bufstream = self._makeOne(stream, START, BUFSIZE) + self.assertIs(bufstream._stream, stream) + self.assertEqual(bufstream._start_pos, START) + self.assertEqual(bufstream._buffer_pos, 0) + self.assertEqual(bufstream._buffered_data, CONTENT[START:]) + self.assertEqual(len(bufstream), len(CONTENT) - START) + self.assertTrue(bufstream.stream_exhausted) + self.assertEqual(bufstream.stream_end_position, len(CONTENT)) + + def test__bytes_remaining_start_zero_longer_than_buffer(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 0 + BUFSIZE = 4 + stream = BytesIO(CONTENT) + bufstream = self._makeOne(stream, START, BUFSIZE) + self.assertEqual(bufstream._bytes_remaining, BUFSIZE) + + def test__bytes_remaining_start_zero_shorter_than_buffer(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 8 + BUFSIZE = 10 + stream = BytesIO(CONTENT) + stream.read(START) # already consumed + bufstream = self._makeOne(stream, START, BUFSIZE) + self.assertEqual(bufstream._bytes_remaining, len(CONTENT) - START) + + def test_read_w_none(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 0 + BUFSIZE = 4 + stream = BytesIO(CONTENT) + bufstream = self._makeOne(stream, START, BUFSIZE) + with self.assertRaises(ValueError): + bufstream.read(None) + + def test_read_w_negative_size(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 0 + BUFSIZE = 4 + stream = BytesIO(CONTENT) + bufstream = self._makeOne(stream, START, BUFSIZE) + with self.assertRaises(ValueError): + bufstream.read(-2) + + def test_read_from_start(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = 0 + BUFSIZE = 4 + stream = BytesIO(CONTENT) + bufstream = self._makeOne(stream, START, BUFSIZE) + self.assertEqual(bufstream.read(4), CONTENT[:4]) + + def test_read_exhausted(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + START = len(CONTENT) + BUFSIZE = 10 + stream = BytesIO(CONTENT) + stream.read(START) # already consumed + bufstream = self._makeOne(stream, START, BUFSIZE) + self.assertTrue(bufstream.stream_exhausted) + self.assertEqual(bufstream.stream_end_position, len(CONTENT)) + self.assertEqual(bufstream._bytes_remaining, 0) + self.assertEqual(bufstream.read(10), b'') diff --git a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py new file mode 100644 index 000000000000..b72dfabac38b --- /dev/null +++ 
b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py @@ -0,0 +1,101 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_HttpError(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.exceptions import HttpError + return HttpError + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + RESPONSE = {'status': '404'} + CONTENT = b'CONTENT' + URL = 'http://www.example.com' + exception = self._makeOne(RESPONSE, CONTENT, URL) + self.assertEqual(exception.response, RESPONSE) + self.assertEqual(exception.content, CONTENT) + self.assertEqual(exception.url, URL) + self.assertEqual(exception.status_code, 404) + self.assertEqual( + str(exception), + "HttpError accessing : " + "response: <{'status': '404'}>, content ") + + def test_from_response(self): + RESPONSE = {'status': '404'} + CONTENT = b'CONTENT' + URL = 'http://www.example.com' + + class _Response(object): + info = RESPONSE + content = CONTENT + request_url = URL + + klass = self._getTargetClass() + exception = klass.from_response(_Response()) + self.assertIsInstance(exception, klass) + self.assertEqual(exception.response, RESPONSE) + self.assertEqual(exception.content, CONTENT) + self.assertEqual(exception.url, URL) + + +class Test_RetryAfterError(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.exceptions import RetryAfterError + return RetryAfterError + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + RESPONSE = {'status': '404'} + CONTENT = b'CONTENT' + URL = 'http://www.example.com' + RETRY_AFTER = 60 + exception = self._makeOne(RESPONSE, CONTENT, URL, RETRY_AFTER) + self.assertEqual(exception.response, RESPONSE) + self.assertEqual(exception.content, CONTENT) + self.assertEqual(exception.url, URL) + self.assertEqual(exception.retry_after, RETRY_AFTER) + self.assertEqual( + str(exception), + "HttpError accessing : " + "response: <{'status': '404'}>, content ") + + def test_from_response(self): + RESPONSE = {'status': '404'} + CONTENT = b'CONTENT' + URL = 'http://www.example.com' + RETRY_AFTER = 60 + + class _Response(object): + info = RESPONSE + content = CONTENT + request_url = URL + retry_after = RETRY_AFTER + + klass = self._getTargetClass() + exception = klass.from_response(_Response()) + self.assertIsInstance(exception, klass) + self.assertEqual(exception.response, RESPONSE) + self.assertEqual(exception.content, CONTENT) + self.assertEqual(exception.url, URL) + self.assertEqual(exception.retry_after, RETRY_AFTER) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py new file mode 100644 index 000000000000..8aad20cfb2f1 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py @@ -0,0 +1,482 @@ +# Copyright 2016 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test__httplib2_debug_level(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.http_wrapper import _httplib2_debug_level + return _httplib2_debug_level + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_wo_loggable_body_wo_http(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + + request = _Request() + LEVEL = 1 + _httplib2 = _Dummy(debuglevel=0) + with _Monkey(MUT, httplib2=_httplib2): + with self._makeOne(request, LEVEL): + self.assertEqual(_httplib2.debuglevel, 0) + + def test_w_loggable_body_wo_http(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + + request = _Request(loggable_body=object()) + LEVEL = 1 + _httplib2 = _Dummy(debuglevel=0) + with _Monkey(MUT, httplib2=_httplib2): + with self._makeOne(request, LEVEL): + self.assertEqual(_httplib2.debuglevel, LEVEL) + self.assertEqual(_httplib2.debuglevel, 0) + + def test_w_loggable_body_w_http(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + + class _Connection(object): + debuglevel = 0 + + def set_debuglevel(self, value): + self.debuglevel = value + + request = _Request(loggable_body=object()) + LEVEL = 1 + _httplib2 = _Dummy(debuglevel=0) + update_me = _Connection() + skip_me = _Connection() + connections = {'update:me': update_me, 'skip_me': skip_me} + _http = _Dummy(connections=connections) + with _Monkey(MUT, httplib2=_httplib2): + with self._makeOne(request, LEVEL, _http): + self.assertEqual(_httplib2.debuglevel, LEVEL) + self.assertEqual(update_me.debuglevel, LEVEL) + self.assertEqual(skip_me.debuglevel, 0) + self.assertEqual(_httplib2.debuglevel, 0) + self.assertEqual(update_me.debuglevel, 0) + self.assertEqual(skip_me.debuglevel, 0) + + +class Test_Request(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.http_wrapper import Request + return Request + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + request = self._makeOne() + self.assertEqual(request.url, '') + self.assertEqual(request.http_method, 'GET') + self.assertEqual(request.headers, {'content-length': '0'}) + self.assertEqual(request.body, '') + self.assertIsNone(request.loggable_body) + + def test_loggable_body_setter_w_body_None(self): + from google.cloud.streaming.exceptions import RequestError + request = self._makeOne(body=None) + with self.assertRaises(RequestError): + request.loggable_body = 'abc' + + def test_body_setter_w_None(self): + request = self._makeOne() + request.loggable_body = 'abc' + request.body = None + self.assertEqual(request.headers, {}) + self.assertIsNone(request.body) + self.assertEqual(request.loggable_body, 'abc') + + def test_body_setter_w_non_string(self): + request = self._makeOne() + request.loggable_body = 'abc' + 
request.body = body = _Dummy(length=123) + self.assertEqual(request.headers, {'content-length': '123'}) + self.assertIs(request.body, body) + self.assertEqual(request.loggable_body, '') + + +class Test_Response(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.http_wrapper import Response + return Response + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + info = {'status': '200'} + response = self._makeOne(info, CONTENT, URL) + self.assertEqual(len(response), len(CONTENT)) + self.assertEqual(response.status_code, 200) + self.assertIsNone(response.retry_after) + self.assertFalse(response.is_redirect) + + def test_length_w_content_encoding_w_content_range(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + RANGE = 'bytes 0-122/5678' + info = { + 'status': '200', + 'content-length': len(CONTENT), + 'content-encoding': 'testing', + 'content-range': RANGE, + } + response = self._makeOne(info, CONTENT, URL) + self.assertEqual(len(response), 123) + + def test_length_w_content_encoding_wo_content_range(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + info = { + 'status': '200', + 'content-length': len(CONTENT), + 'content-encoding': 'testing', + } + response = self._makeOne(info, CONTENT, URL) + self.assertEqual(len(response), len(CONTENT)) + + def test_length_w_content_length_w_content_range(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + RANGE = 'bytes 0-12/5678' + info = { + 'status': '200', + 'content-length': len(CONTENT) * 2, + 'content-range': RANGE, + } + response = self._makeOne(info, CONTENT, URL) + self.assertEqual(len(response), len(CONTENT) * 2) + + def test_length_wo_content_length_w_content_range(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + RANGE = 'bytes 0-122/5678' + info = { + 'status': '200', + 'content-range': RANGE, + } + response = self._makeOne(info, CONTENT, URL) + self.assertEqual(len(response), 123) + + def test_retry_after_w_header(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + info = { + 'status': '200', + 'retry-after': '123', + } + response = self._makeOne(info, CONTENT, URL) + self.assertEqual(response.retry_after, 123) + + def test_is_redirect_w_code_wo_location(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + info = { + 'status': '301', + } + response = self._makeOne(info, CONTENT, URL) + self.assertFalse(response.is_redirect) + + def test_is_redirect_w_code_w_location(self): + CONTENT = 'CONTENT' + URL = 'http://example.com/api' + info = { + 'status': '301', + 'location': 'http://example.com/other', + } + response = self._makeOne(info, CONTENT, URL) + self.assertTrue(response.is_redirect) + + +class Test__check_response(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from google.cloud.streaming.http_wrapper import _check_response + return _check_response(*args, **kw) + + def test_w_none(self): + from google.cloud.streaming.exceptions import RequestError + with self.assertRaises(RequestError): + self._callFUT(None) + + def test_w_TOO_MANY_REQUESTS(self): + from google.cloud.streaming.exceptions import BadStatusCodeError + from google.cloud.streaming.http_wrapper import TOO_MANY_REQUESTS + + with self.assertRaises(BadStatusCodeError): + self._callFUT(_Response(TOO_MANY_REQUESTS)) + + def test_w_50x(self): + from google.cloud.streaming.exceptions import BadStatusCodeError + + with self.assertRaises(BadStatusCodeError): + 
self._callFUT(_Response(500)) + + with self.assertRaises(BadStatusCodeError): + self._callFUT(_Response(503)) + + def test_w_retry_after(self): + from google.cloud.streaming.exceptions import RetryAfterError + + with self.assertRaises(RetryAfterError): + self._callFUT(_Response(200, 20)) + + def test_pass(self): + self._callFUT(_Response(200)) + + +class Test__reset_http_connections(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from google.cloud.streaming.http_wrapper import _reset_http_connections + return _reset_http_connections(*args, **kw) + + def test_wo_connections(self): + http = object() + self._callFUT(http) + + def test_w_connections(self): + connections = {'delete:me': object(), 'skip_me': object()} + http = _Dummy(connections=connections) + self._callFUT(http) + self.assertFalse('delete:me' in connections) + self.assertTrue('skip_me' in connections) + + +class Test___make_api_request_no_retry(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from google.cloud.streaming.http_wrapper import ( + _make_api_request_no_retry) + return _make_api_request_no_retry(*args, **kw) + + def _verify_requested(self, http, request, + redirections=5, connection_type=None): + self.assertEqual(len(http._requested), 1) + url, kw = http._requested[0] + self.assertEqual(url, request.url) + self.assertEqual(kw['method'], request.http_method) + self.assertEqual(kw['body'], request.body) + self.assertEqual(kw['headers'], request.headers) + self.assertEqual(kw['redirections'], redirections) + self.assertEqual(kw['connection_type'], connection_type) + + def test_defaults_wo_connections(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + INFO = {'status': '200'} + CONTENT = 'CONTENT' + _http = _Http((INFO, CONTENT)) + _httplib2 = _Dummy(debuglevel=1) + _request = _Request() + _checked = [] + with _Monkey(MUT, httplib2=_httplib2, + _check_response=_checked.append): + response = self._callFUT(_http, _request) + + self.assertIsInstance(response, MUT.Response) + self.assertEqual(response.info, INFO) + self.assertEqual(response.content, CONTENT) + self.assertEqual(response.request_url, _request.url) + self.assertEqual(_checked, [response]) + self._verify_requested(_http, _request) + + def test_w_http_connections_miss(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + INFO = {'status': '200'} + CONTENT = 'CONTENT' + CONN_TYPE = object() + _http = _Http((INFO, CONTENT)) + _http.connections = {'https': CONN_TYPE} + _httplib2 = _Dummy(debuglevel=1) + _request = _Request() + _checked = [] + with _Monkey(MUT, httplib2=_httplib2, + _check_response=_checked.append): + response = self._callFUT(_http, _request) + + self.assertIsInstance(response, MUT.Response) + self.assertEqual(response.info, INFO) + self.assertEqual(response.content, CONTENT) + self.assertEqual(response.request_url, _request.url) + self.assertEqual(_checked, [response]) + self._verify_requested(_http, _request) + + def test_w_http_connections_hit(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + INFO = {'status': '200'} + CONTENT = 'CONTENT' + CONN_TYPE = object() + _http = _Http((INFO, CONTENT)) + _http.connections = {'http': CONN_TYPE} + _httplib2 = _Dummy(debuglevel=1) + _request = _Request() + _checked = [] + with _Monkey(MUT, httplib2=_httplib2, + _check_response=_checked.append): + response = self._callFUT(_http, _request) + + 
self.assertIsInstance(response, MUT.Response) + self.assertEqual(response.info, INFO) + self.assertEqual(response.content, CONTENT) + self.assertEqual(response.request_url, _request.url) + self.assertEqual(_checked, [response]) + self._verify_requested(_http, _request, connection_type=CONN_TYPE) + + def test_w_request_returning_None(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + from google.cloud.streaming.exceptions import RequestError + INFO = None + CONTENT = None + CONN_TYPE = object() + _http = _Http((INFO, CONTENT)) + _http.connections = {'http': CONN_TYPE} + _httplib2 = _Dummy(debuglevel=1) + _request = _Request() + with _Monkey(MUT, httplib2=_httplib2): + with self.assertRaises(RequestError): + self._callFUT(_http, _request) + self._verify_requested(_http, _request, connection_type=CONN_TYPE) + + +class Test_make_api_request(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from google.cloud.streaming.http_wrapper import make_api_request + return make_api_request(*args, **kw) + + def test_wo_exception(self): + from google.cloud.streaming import http_wrapper as MUT + from google.cloud._testing import _Monkey + + HTTP, REQUEST, RESPONSE = object(), object(), object() + _created, _checked = [], [] + + def _wo_exception(*args, **kw): + _created.append((args, kw)) + return RESPONSE + + with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, + _check_response=_checked.append): + response = self._callFUT(HTTP, REQUEST) + + self.assertIs(response, RESPONSE) + expected_kw = {'redirections': MUT._REDIRECTIONS} + self.assertEqual(_created, [((HTTP, REQUEST), expected_kw)]) + self.assertEqual(_checked, []) # not called by '_wo_exception' + + def test_w_exceptions_lt_max_retries(self): + from google.cloud.streaming.exceptions import RetryAfterError + from google.cloud.streaming import http_wrapper as MUT + from google.cloud._testing import _Monkey + + HTTP, RESPONSE = object(), object() + REQUEST = _Request() + _created, _checked = [], [] + _counter = [None] * 4 + + def _wo_exception(*args, **kw): + _created.append((args, kw)) + if _counter: + _counter.pop() + raise RetryAfterError(RESPONSE, '', REQUEST.url, 0.1) + return RESPONSE + + with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, + _check_response=_checked.append): + response = self._callFUT(HTTP, REQUEST, retries=5) + + self.assertIs(response, RESPONSE) + self.assertEqual(len(_created), 5) + expected_kw = {'redirections': MUT._REDIRECTIONS} + for attempt in _created: + self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw)) + self.assertEqual(_checked, []) # not called by '_wo_exception' + + def test_w_exceptions_gt_max_retries(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import http_wrapper as MUT + HTTP = object() + REQUEST = _Request() + _created, _checked = [], [] + + def _wo_exception(*args, **kw): + _created.append((args, kw)) + raise ValueError('Retryable') + + with _Monkey(MUT, calculate_wait_for_retry=lambda *ignored: 0.1, + _make_api_request_no_retry=_wo_exception, + _check_response=_checked.append): + with self.assertRaises(ValueError): + self._callFUT(HTTP, REQUEST, retries=3) + + self.assertEqual(len(_created), 3) + expected_kw = {'redirections': MUT._REDIRECTIONS} + for attempt in _created: + self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw)) + self.assertEqual(_checked, []) # not called by '_wo_exception' + + +class _Dummy(object): + def __init__(self, **kw): + self.__dict__.update(kw) + + +class 
_Request(object): + __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body',) + URL = 'http://example.com/api' + + def __init__(self, url=URL, http_method='GET', body='', + loggable_body=None): + self.url = url + self.http_method = http_method + self.body = body + self.headers = {} + self.loggable_body = loggable_body + + +class _Response(object): + content = '' + request_url = _Request.URL + + def __init__(self, status_code, retry_after=None): + self.info = {'status': status_code} + self.status_code = status_code + self.retry_after = retry_after + + +class _Http(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def request(self, url, **kw): + self._requested.append((url, kw)) + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py new file mode 100644 index 000000000000..10e9d9bbf25b --- /dev/null +++ b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py @@ -0,0 +1,82 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_StreamSlice(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.streaming.stream_slice import StreamSlice + return StreamSlice + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + MAXSIZE = 4 + stream = BytesIO(CONTENT) + stream_slice = self._makeOne(stream, MAXSIZE) + self.assertIs(stream_slice._stream, stream) + self.assertEqual(stream_slice._remaining_bytes, MAXSIZE) + self.assertEqual(stream_slice._max_bytes, MAXSIZE) + self.assertEqual(len(stream_slice), MAXSIZE) + self.assertEqual(stream_slice.length, MAXSIZE) + + def test___nonzero___empty(self): + from io import BytesIO + CONTENT = b'' + MAXSIZE = 0 + stream = BytesIO(CONTENT) + stream_slice = self._makeOne(stream, MAXSIZE) + self.assertFalse(stream_slice) + + def test___nonzero___nonempty(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + MAXSIZE = 4 + stream = BytesIO(CONTENT) + stream_slice = self._makeOne(stream, MAXSIZE) + self.assertTrue(stream_slice) + + def test_read_exhausted(self): + from io import BytesIO + from six.moves import http_client + CONTENT = b'' + MAXSIZE = 4 + stream = BytesIO(CONTENT) + stream_slice = self._makeOne(stream, MAXSIZE) + with self.assertRaises(http_client.IncompleteRead): + stream_slice.read() + + def test_read_implicit_size(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + MAXSIZE = 4 + stream = BytesIO(CONTENT) + stream_slice = self._makeOne(stream, MAXSIZE) + self.assertEqual(stream_slice.read(), CONTENT[:MAXSIZE]) + self.assertEqual(stream_slice._remaining_bytes, 0) + + def test_read_explicit_size(self): + from io import BytesIO + CONTENT = b'CONTENT GOES HERE' + MAXSIZE = 4 + SIZE = 3 + 
stream = BytesIO(CONTENT) + stream_slice = self._makeOne(stream, MAXSIZE) + self.assertEqual(stream_slice.read(SIZE), CONTENT[:SIZE]) + self.assertEqual(stream_slice._remaining_bytes, MAXSIZE - SIZE) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py new file mode 100644 index 000000000000..58681585fd5b --- /dev/null +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -0,0 +1,1956 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test__Transfer(unittest.TestCase): + URL = 'http://example.com/api' + + def _getTargetClass(self): + from google.cloud.streaming.transfer import _Transfer + return _Transfer + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE + stream = _Stream() + xfer = self._makeOne(stream) + self.assertIs(xfer.stream, stream) + self.assertFalse(xfer.close_stream) + self.assertEqual(xfer.chunksize, _DEFAULT_CHUNKSIZE) + self.assertTrue(xfer.auto_transfer) + self.assertIsNone(xfer.bytes_http) + self.assertIsNone(xfer.http) + self.assertEqual(xfer.num_retries, 5) + self.assertIsNone(xfer.url) + self.assertFalse(xfer.initialized) + + def test_ctor_explicit(self): + stream = _Stream() + HTTP = object() + CHUNK_SIZE = 1 << 18 + NUM_RETRIES = 8 + xfer = self._makeOne(stream, + close_stream=True, + chunksize=CHUNK_SIZE, + auto_transfer=False, + http=HTTP, + num_retries=NUM_RETRIES) + self.assertIs(xfer.stream, stream) + self.assertTrue(xfer.close_stream) + self.assertEqual(xfer.chunksize, CHUNK_SIZE) + self.assertFalse(xfer.auto_transfer) + self.assertIs(xfer.bytes_http, HTTP) + self.assertIs(xfer.http, HTTP) + self.assertEqual(xfer.num_retries, NUM_RETRIES) + + def test_bytes_http_fallback_to_http(self): + stream = _Stream() + HTTP = object() + xfer = self._makeOne(stream, http=HTTP) + self.assertIs(xfer.bytes_http, HTTP) + + def test_bytes_http_setter(self): + stream = _Stream() + HTTP = object() + BYTES_HTTP = object() + xfer = self._makeOne(stream, http=HTTP) + xfer.bytes_http = BYTES_HTTP + self.assertIs(xfer.bytes_http, BYTES_HTTP) + + def test_num_retries_setter_invalid(self): + stream = _Stream() + xfer = self._makeOne(stream) + with self.assertRaises(ValueError): + xfer.num_retries = object() + + def test_num_retries_setter_negative(self): + stream = _Stream() + xfer = self._makeOne(stream) + with self.assertRaises(ValueError): + xfer.num_retries = -1 + + def test__initialize_not_already_initialized_w_http(self): + HTTP = object() + stream = _Stream() + xfer = self._makeOne(stream) + xfer._initialize(HTTP, self.URL) + self.assertTrue(xfer.initialized) + self.assertIs(xfer.http, HTTP) + self.assertIs(xfer.url, self.URL) + + def test__initialize_not_already_initialized_wo_http(self): + from httplib2 import Http + stream = _Stream() + xfer = self._makeOne(stream) + 
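+        # With http=None, _initialize constructs its own httplib2.Http
+        # instance, as the assertIsInstance check below verifies.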
xfer._initialize(None, self.URL) + self.assertTrue(xfer.initialized) + self.assertIsInstance(xfer.http, Http) + self.assertIs(xfer.url, self.URL) + + def test__initialize_w_existing_http(self): + HTTP_1, HTTP_2 = object(), object() + stream = _Stream() + xfer = self._makeOne(stream, http=HTTP_1) + xfer._initialize(HTTP_2, self.URL) + self.assertTrue(xfer.initialized) + self.assertIs(xfer.http, HTTP_1) + self.assertIs(xfer.url, self.URL) + + def test__initialize_already_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + URL_2 = 'http://example.com/other' + HTTP_1, HTTP_2 = object(), object() + stream = _Stream() + xfer = self._makeOne(stream) + xfer._initialize(HTTP_1, self.URL) + with self.assertRaises(TransferInvalidError): + xfer._initialize(HTTP_2, URL_2) + + def test__ensure_initialized_hit(self): + HTTP = object() + stream = _Stream() + xfer = self._makeOne(stream) + xfer._initialize(HTTP, self.URL) + xfer._ensure_initialized() # no raise + + def test__ensure_initialized_miss(self): + from google.cloud.streaming.exceptions import TransferInvalidError + stream = _Stream() + xfer = self._makeOne(stream) + with self.assertRaises(TransferInvalidError): + xfer._ensure_initialized() + + def test__ensure_uninitialized_hit(self): + stream = _Stream() + xfer = self._makeOne(stream) + xfer._ensure_uninitialized() # no raise + + def test__ensure_uninitialized_miss(self): + from google.cloud.streaming.exceptions import TransferInvalidError + stream = _Stream() + HTTP = object() + xfer = self._makeOne(stream) + xfer._initialize(HTTP, self.URL) + with self.assertRaises(TransferInvalidError): + xfer._ensure_uninitialized() + + def test___del___closes_stream(self): + + stream = _Stream() + xfer = self._makeOne(stream, close_stream=True) + + self.assertFalse(stream._closed) + del xfer + self.assertTrue(stream._closed) + + +class Test_Download(unittest.TestCase): + URL = "http://example.com/api" + + def _getTargetClass(self): + from google.cloud.streaming.transfer import Download + return Download + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + stream = _Stream() + download = self._makeOne(stream) + self.assertIs(download.stream, stream) + self.assertIsNone(download._initial_response) + self.assertEqual(download.progress, 0) + self.assertIsNone(download.total_size) + self.assertIsNone(download.encoding) + + def test_ctor_w_kwds(self): + stream = _Stream() + CHUNK_SIZE = 123 + download = self._makeOne(stream, chunksize=CHUNK_SIZE) + self.assertIs(download.stream, stream) + self.assertEqual(download.chunksize, CHUNK_SIZE) + + def test_ctor_w_total_size(self): + stream = _Stream() + SIZE = 123 + download = self._makeOne(stream, total_size=SIZE) + self.assertIs(download.stream, stream) + self.assertEqual(download.total_size, SIZE) + + def test_from_file_w_existing_file_no_override(self): + import os + klass = self._getTargetClass() + with _tempdir() as tempdir: + filename = os.path.join(tempdir, 'file.out') + with open(filename, 'w') as fileobj: + fileobj.write('EXISTING FILE') + with self.assertRaises(ValueError): + klass.from_file(filename) + + def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): + import os + klass = self._getTargetClass() + with _tempdir() as tempdir: + filename = os.path.join(tempdir, 'file.out') + with open(filename, 'w') as fileobj: + fileobj.write('EXISTING FILE') + download = klass.from_file(filename, overwrite=True, + auto_transfer=False) + 
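+        # overwrite=True opens the existing file for writing, truncating
+        # it; the read-back below confirms the old contents are gone.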
self.assertFalse(download.auto_transfer) + del download # closes stream + with open(filename, 'rb') as fileobj: + self.assertEqual(fileobj.read(), b'') + + def test_from_stream_defaults(self): + stream = _Stream() + klass = self._getTargetClass() + download = klass.from_stream(stream) + self.assertIs(download.stream, stream) + self.assertTrue(download.auto_transfer) + self.assertIsNone(download.total_size) + + def test_from_stream_explicit(self): + CHUNK_SIZE = 1 << 18 + SIZE = 123 + stream = _Stream() + klass = self._getTargetClass() + download = klass.from_stream(stream, auto_transfer=False, + total_size=SIZE, chunksize=CHUNK_SIZE) + self.assertIs(download.stream, stream) + self.assertFalse(download.auto_transfer) + self.assertEqual(download.total_size, SIZE) + self.assertEqual(download.chunksize, CHUNK_SIZE) + + def test_configure_request(self): + CHUNK_SIZE = 100 + download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + request = _Dummy(headers={}) + url_builder = _Dummy(query_params={}) + download.configure_request(request, url_builder) + self.assertEqual(request.headers, {'Range': 'bytes=0-99'}) + self.assertEqual(url_builder.query_params, {'alt': 'media'}) + + def test__set_total_wo_content_range_wo_existing_total(self): + info = {} + download = self._makeOne(_Stream()) + download._set_total(info) + self.assertEqual(download.total_size, 0) + + def test__set_total_wo_content_range_w_existing_total(self): + SIZE = 123 + info = {} + download = self._makeOne(_Stream(), total_size=SIZE) + download._set_total(info) + self.assertEqual(download.total_size, SIZE) + + def test__set_total_w_content_range_w_existing_total(self): + SIZE = 123 + info = {'content-range': 'bytes 123-234/4567'} + download = self._makeOne(_Stream(), total_size=SIZE) + download._set_total(info) + self.assertEqual(download.total_size, 4567) + + def test__set_total_w_content_range_w_asterisk_total(self): + info = {'content-range': 'bytes 123-234/*'} + download = self._makeOne(_Stream()) + download._set_total(info) + self.assertEqual(download.total_size, 0) + + def test_initialize_download_already_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + request = _Request() + download = self._makeOne(_Stream()) + download._initialize(None, self.URL) + with self.assertRaises(TransferInvalidError): + download.initialize_download(request, http=object()) + + def test_initialize_download_wo_autotransfer(self): + request = _Request() + http = object() + download = self._makeOne(_Stream(), auto_transfer=False) + download.initialize_download(request, http) + self.assertIs(download.http, http) + self.assertEqual(download.url, request.url) + + def test_initialize_download_w_autotransfer_failing(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.exceptions import HttpError + request = _Request() + http = object() + download = self._makeOne(_Stream(), auto_transfer=True) + + response = _makeResponse(http_client.BAD_REQUEST) + requester = _MakeRequest(response) + + with _Monkey(MUT, make_api_request=requester): + with self.assertRaises(HttpError): + download.initialize_download(request, http) + + self.assertTrue(len(requester._requested), 1) + self.assertIs(requester._requested[0][0], request) + + def test_initialize_download_w_autotransfer_w_content_location(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import 
transfer as MUT + REDIRECT_URL = 'http://example.com/other' + request = _Request() + http = object() + info = {'content-location': REDIRECT_URL} + download = self._makeOne(_Stream(), auto_transfer=True) + + response = _makeResponse(http_client.NO_CONTENT, info) + requester = _MakeRequest(response) + + with _Monkey(MUT, make_api_request=requester): + download.initialize_download(request, http) + + self.assertIsNone(download._initial_response) + self.assertEqual(download.total_size, 0) + self.assertIs(download.http, http) + self.assertEqual(download.url, REDIRECT_URL) + self.assertTrue(len(requester._requested), 1) + self.assertIs(requester._requested[0][0], request) + + def test__normalize_start_end_w_end_w_start_lt_0(self): + from google.cloud.streaming.exceptions import TransferInvalidError + download = self._makeOne(_Stream()) + + with self.assertRaises(TransferInvalidError): + download._normalize_start_end(-1, 0) + + def test__normalize_start_end_w_end_w_start_gt_total(self): + from google.cloud.streaming.exceptions import TransferInvalidError + download = self._makeOne(_Stream()) + download._set_total({'content-range': 'bytes 0-1/2'}) + + with self.assertRaises(TransferInvalidError): + download._normalize_start_end(3, 0) + + def test__normalize_start_end_w_end_lt_start(self): + from google.cloud.streaming.exceptions import TransferInvalidError + download = self._makeOne(_Stream()) + download._set_total({'content-range': 'bytes 0-1/2'}) + + with self.assertRaises(TransferInvalidError): + download._normalize_start_end(1, 0) + + def test__normalize_start_end_w_end_gt_start(self): + download = self._makeOne(_Stream()) + download._set_total({'content-range': 'bytes 0-1/2'}) + self.assertEqual(download._normalize_start_end(1, 2), (1, 1)) + + def test__normalize_start_end_wo_end_w_start_lt_0(self): + download = self._makeOne(_Stream()) + download._set_total({'content-range': 'bytes 0-1/2'}) + self.assertEqual(download._normalize_start_end(-2), (0, 1)) + self.assertEqual(download._normalize_start_end(-1), (1, 1)) + + def test__normalize_start_end_wo_end_w_start_ge_0(self): + download = self._makeOne(_Stream()) + download._set_total({'content-range': 'bytes 0-1/100'}) + self.assertEqual(download._normalize_start_end(0), (0, 99)) + self.assertEqual(download._normalize_start_end(1), (1, 99)) + + def test__set_range_header_w_start_lt_0(self): + request = _Request() + download = self._makeOne(_Stream()) + download._set_range_header(request, -1) + self.assertEqual(request.headers['range'], 'bytes=-1') + + def test__set_range_header_w_start_ge_0_wo_end(self): + request = _Request() + download = self._makeOne(_Stream()) + download._set_range_header(request, 0) + self.assertEqual(request.headers['range'], 'bytes=0-') + + def test__set_range_header_w_start_ge_0_w_end(self): + request = _Request() + download = self._makeOne(_Stream()) + download._set_range_header(request, 0, 1) + self.assertEqual(request.headers['range'], 'bytes=0-1') + + def test__compute_end_byte_w_start_lt_0_w_end(self): + download = self._makeOne(_Stream()) + self.assertEqual(download._compute_end_byte(-1, 1), 1) + + def test__compute_end_byte_w_start_ge_0_wo_end_w_use_chunks(self): + CHUNK_SIZE = 5 + download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + self.assertEqual(download._compute_end_byte(0, use_chunks=True), 4) + + def test__compute_end_byte_w_start_ge_0_w_end_w_use_chunks(self): + CHUNK_SIZE = 5 + download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + self.assertEqual(download._compute_end_byte(0, 3, 
use_chunks=True), 3) + self.assertEqual(download._compute_end_byte(0, 5, use_chunks=True), 4) + + def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self): + CHUNK_SIZE = 50 + download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download._set_total({'content-range': 'bytes 0-1/10'}) + self.assertEqual(download._compute_end_byte(0, 100, use_chunks=False), + 9) + self.assertEqual(download._compute_end_byte(0, 8, use_chunks=False), 8) + + def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self): + CHUNK_SIZE = 50 + download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download._set_total({'content-range': 'bytes 0-1/10'}) + self.assertEqual(download._compute_end_byte(0, use_chunks=False), 9) + + def test__get_chunk_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + download = self._makeOne(_Stream()) + + with self.assertRaises(TransferInvalidError): + download._get_chunk(0, 10) + + def test__get_chunk(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + http = object() + download = self._makeOne(_Stream()) + download._initialize(http, self.URL) + response = _makeResponse(http_client.OK) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + found = download._get_chunk(0, 10) + + self.assertIs(found, response) + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers['range'], 'bytes=0-10') + + def test__process_response_w_FORBIDDEN(self): + from google.cloud.streaming.exceptions import HttpError + from six.moves import http_client + download = self._makeOne(_Stream()) + response = _makeResponse(http_client.FORBIDDEN) + with self.assertRaises(HttpError): + download._process_response(response) + + def test__process_response_w_NOT_FOUND(self): + from google.cloud.streaming.exceptions import HttpError + from six.moves import http_client + download = self._makeOne(_Stream()) + response = _makeResponse(http_client.NOT_FOUND) + with self.assertRaises(HttpError): + download._process_response(response) + + def test__process_response_w_other_error(self): + from google.cloud.streaming.exceptions import TransferRetryError + from six.moves import http_client + download = self._makeOne(_Stream()) + response = _makeResponse(http_client.BAD_REQUEST) + with self.assertRaises(TransferRetryError): + download._process_response(response) + + def test__process_response_w_OK_wo_encoding(self): + from six.moves import http_client + stream = _Stream() + download = self._makeOne(stream) + response = _makeResponse(http_client.OK, content='OK') + found = download._process_response(response) + self.assertIs(found, response) + self.assertEqual(stream._written, ['OK']) + self.assertEqual(download.progress, 2) + self.assertIsNone(download.encoding) + + def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): + from six.moves import http_client + stream = _Stream() + download = self._makeOne(stream) + info = {'content-encoding': 'blah'} + response = _makeResponse(http_client.OK, info, 'PARTIAL') + found = download._process_response(response) + self.assertIs(found, response) + self.assertEqual(stream._written, ['PARTIAL']) + self.assertEqual(download.progress, 7) + self.assertEqual(download.encoding, 'blah') + + def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): + from six.moves import http_client + stream = _Stream() + 
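+        # A 416 (range not satisfiable) response is treated as an empty
+        # result: nothing is written and progress stays at zero.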
download = self._makeOne(stream) + response = _makeResponse( + http_client.REQUESTED_RANGE_NOT_SATISFIABLE) + found = download._process_response(response) + self.assertIs(found, response) + self.assertEqual(stream._written, []) + self.assertEqual(download.progress, 0) + self.assertIsNone(download.encoding) + + def test__process_response_w_NO_CONTENT(self): + from six.moves import http_client + stream = _Stream() + download = self._makeOne(stream) + response = _makeResponse(status_code=http_client.NO_CONTENT) + found = download._process_response(response) + self.assertIs(found, response) + self.assertEqual(stream._written, ['']) + self.assertEqual(download.progress, 0) + self.assertIsNone(download.encoding) + + def test_get_range_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + download = self._makeOne(_Stream()) + with self.assertRaises(TransferInvalidError): + download.get_range(0, 10) + + def test_get_range_wo_total_size_complete(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + REQ_RANGE = 'bytes=0-%d' % (LEN,) + RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN) + http = object() + stream = _Stream() + download = self._makeOne(stream) + download._initialize(http, self.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info, CONTENT) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + download.get_range(0, LEN) + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE}) + self.assertEqual(stream._written, [CONTENT]) + self.assertEqual(download.total_size, LEN) + + def test_get_range_wo_total_size_wo_end(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + START = 5 + CHUNK_SIZE = 123 + REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,) + RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) + http = object() + stream = _Stream() + download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download._initialize(http, self.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info, CONTENT[START:]) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + download.get_range(START) + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE}) + self.assertEqual(stream._written, [CONTENT[START:]]) + self.assertEqual(download.total_size, LEN) + + def test_get_range_w_total_size_partial(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + PARTIAL_LEN = 5 + REQ_RANGE = 'bytes=0-%d' % (PARTIAL_LEN,) + RESP_RANGE = 'bytes 0-%d/%d' % (PARTIAL_LEN, LEN,) + http = object() + stream = _Stream() + download = self._makeOne(stream, total_size=LEN) + download._initialize(http, self.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info, CONTENT[:PARTIAL_LEN]) + response.length = LEN + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + 
make_api_request=requester): + download.get_range(0, PARTIAL_LEN) + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE}) + self.assertEqual(stream._written, [CONTENT[:PARTIAL_LEN]]) + self.assertEqual(download.total_size, LEN) + + def test_get_range_w_empty_chunk(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.exceptions import TransferRetryError + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + START = 5 + CHUNK_SIZE = 123 + REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,) + RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) + http = object() + stream = _Stream() + download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download._initialize(http, self.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + with self.assertRaises(TransferRetryError): + download.get_range(START) + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE}) + self.assertEqual(stream._written, ['']) + self.assertEqual(download.total_size, LEN) + + def test_get_range_w_total_size_wo_use_chunks(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + CHUNK_SIZE = 3 + REQ_RANGE = 'bytes=0-%d' % (LEN - 1,) + RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) + http = object() + stream = _Stream() + download = self._makeOne(stream, total_size=LEN, chunksize=CHUNK_SIZE) + download._initialize(http, self.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info, CONTENT) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + download.get_range(0, use_chunks=False) + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE}) + self.assertEqual(stream._written, [CONTENT]) + self.assertEqual(download.total_size, LEN) + + def test_get_range_w_multiple_chunks(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDE' + LEN = len(CONTENT) + CHUNK_SIZE = 3 + REQ_RANGE_1 = 'bytes=0-%d' % (CHUNK_SIZE - 1,) + RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN) + REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1) + RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN) + http = object() + stream = _Stream() + download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download._initialize(http, self.URL) + info_1 = {'content-range': RESP_RANGE_1} + response_1 = _makeResponse(http_client.PARTIAL_CONTENT, info_1, + CONTENT[:CHUNK_SIZE]) + info_2 = {'content-range': RESP_RANGE_2} + response_2 = _makeResponse(http_client.OK, info_2, + CONTENT[CHUNK_SIZE:]) + requester = _MakeRequest(response_1, response_2) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + download.get_range(0) + + self.assertTrue(len(requester._requested), 2) + request_1 = requester._requested[0][0] + self.assertEqual(request_1.headers, {'range': REQ_RANGE_1}) + request_2 = 
requester._requested[1][0] + self.assertEqual(request_2.headers, {'range': REQ_RANGE_2}) + self.assertEqual(stream._written, [b'ABC', b'DE']) + self.assertEqual(download.total_size, LEN) + + def test_stream_file_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + download = self._makeOne(_Stream()) + + with self.assertRaises(TransferInvalidError): + download.stream_file() + + def test_stream_file_w_initial_response_complete(self): + from six.moves import http_client + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) + stream = _Stream() + download = self._makeOne(stream, total_size=LEN) + info = {'content-range': RESP_RANGE} + download._initial_response = _makeResponse( + http_client.OK, info, CONTENT) + http = object() + download._initialize(http, _Request.URL) + + download.stream_file() + + self.assertEqual(stream._written, [CONTENT]) + self.assertEqual(download.total_size, LEN) + + def test_stream_file_w_initial_response_incomplete(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CHUNK_SIZE = 3 + CONTENT = b'ABCDEF' + LEN = len(CONTENT) + RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN,) + REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1) + RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN,) + stream = _Stream() + http = object() + download = self._makeOne(stream, chunksize=CHUNK_SIZE) + info_1 = {'content-range': RESP_RANGE_1} + download._initial_response = _makeResponse( + http_client.PARTIAL_CONTENT, info_1, CONTENT[:CHUNK_SIZE]) + info_2 = {'content-range': RESP_RANGE_2} + response_2 = _makeResponse( + http_client.OK, info_2, CONTENT[CHUNK_SIZE:]) + requester = _MakeRequest(response_2) + + download._initialize(http, _Request.URL) + + request = _Request() + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + download.stream_file() + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE_2}) + self.assertEqual(stream._written, + [CONTENT[:CHUNK_SIZE], CONTENT[CHUNK_SIZE:]]) + self.assertEqual(download.total_size, LEN) + + def test_stream_file_wo_initial_response_wo_total_size(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + CHUNK_SIZE = 123 + REQ_RANGE = 'bytes=0-%d' % (CHUNK_SIZE - 1) + RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) + stream = _Stream() + http = object() + download = self._makeOne(stream, chunksize=CHUNK_SIZE) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info, CONTENT) + requester = _MakeRequest(response) + download._initialize(http, _Request.URL) + + request = _Request() + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + download.stream_file() + + self.assertTrue(len(requester._requested), 1) + request = requester._requested[0][0] + self.assertEqual(request.headers, {'range': REQ_RANGE}) + self.assertEqual(stream._written, [CONTENT]) + self.assertEqual(download.total_size, LEN) + + +class Test_Upload(unittest.TestCase): + URL = "http://example.com/api" + MIME_TYPE = 'application/octet-stream' + UPLOAD_URL = 'http://example.com/upload/id=foobar' + + def _getTargetClass(self): + from google.cloud.streaming.transfer import Upload + return Upload + + def _makeOne(self, 
stream, mime_type=MIME_TYPE, *args, **kw): + return self._getTargetClass()(stream, mime_type, *args, **kw) + + def test_ctor_defaults(self): + from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE + stream = _Stream() + upload = self._makeOne(stream) + self.assertIs(upload.stream, stream) + self.assertIsNone(upload._final_response) + self.assertIsNone(upload._server_chunk_granularity) + self.assertFalse(upload.complete) + self.assertEqual(upload.mime_type, self.MIME_TYPE) + self.assertEqual(upload.progress, 0) + self.assertIsNone(upload.strategy) + self.assertIsNone(upload.total_size) + self.assertEqual(upload.chunksize, _DEFAULT_CHUNKSIZE) + + def test_ctor_w_kwds(self): + stream = _Stream() + CHUNK_SIZE = 123 + upload = self._makeOne(stream, chunksize=CHUNK_SIZE) + self.assertIs(upload.stream, stream) + self.assertEqual(upload.mime_type, self.MIME_TYPE) + self.assertEqual(upload.chunksize, CHUNK_SIZE) + + def test_from_file_w_nonesuch_file(self): + klass = self._getTargetClass() + filename = '~nosuchuser/file.txt' + with self.assertRaises(OSError): + klass.from_file(filename) + + def test_from_file_wo_mimetype_w_unguessable_filename(self): + import os + klass = self._getTargetClass() + CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE' + with _tempdir() as tempdir: + filename = os.path.join(tempdir, 'file.unguessable') + with open(filename, 'wb') as fileobj: + fileobj.write(CONTENT) + with self.assertRaises(ValueError): + klass.from_file(filename) + + def test_from_file_wo_mimetype_w_guessable_filename(self): + import os + klass = self._getTargetClass() + CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' + with _tempdir() as tempdir: + filename = os.path.join(tempdir, 'file.txt') + with open(filename, 'wb') as fileobj: + fileobj.write(CONTENT) + upload = klass.from_file(filename) + self.assertEqual(upload.mime_type, 'text/plain') + self.assertTrue(upload.auto_transfer) + self.assertEqual(upload.total_size, len(CONTENT)) + upload._stream.close() + + def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): + import os + klass = self._getTargetClass() + CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' + CHUNK_SIZE = 3 + with _tempdir() as tempdir: + filename = os.path.join(tempdir, 'file.unguessable') + with open(filename, 'wb') as fileobj: + fileobj.write(CONTENT) + upload = klass.from_file( + filename, + mime_type=self.MIME_TYPE, + auto_transfer=False, + chunksize=CHUNK_SIZE) + self.assertEqual(upload.mime_type, self.MIME_TYPE) + self.assertFalse(upload.auto_transfer) + self.assertEqual(upload.total_size, len(CONTENT)) + self.assertEqual(upload.chunksize, CHUNK_SIZE) + upload._stream.close() + + def test_from_stream_wo_mimetype(self): + klass = self._getTargetClass() + stream = _Stream() + with self.assertRaises(ValueError): + klass.from_stream(stream, mime_type=None) + + def test_from_stream_defaults(self): + klass = self._getTargetClass() + stream = _Stream() + upload = klass.from_stream(stream, mime_type=self.MIME_TYPE) + self.assertEqual(upload.mime_type, self.MIME_TYPE) + self.assertTrue(upload.auto_transfer) + self.assertIsNone(upload.total_size) + + def test_from_stream_explicit(self): + klass = self._getTargetClass() + stream = _Stream() + SIZE = 10 + CHUNK_SIZE = 3 + upload = klass.from_stream( + stream, + mime_type=self.MIME_TYPE, + auto_transfer=False, + total_size=SIZE, + chunksize=CHUNK_SIZE) + self.assertEqual(upload.mime_type, self.MIME_TYPE) + self.assertFalse(upload.auto_transfer) + self.assertEqual(upload.total_size, SIZE) + self.assertEqual(upload.chunksize, 
CHUNK_SIZE) + + def test_strategy_setter_invalid(self): + upload = self._makeOne(_Stream()) + with self.assertRaises(ValueError): + upload.strategy = object() + with self.assertRaises(ValueError): + upload.strategy = 'unknown' + + def test_strategy_setter_SIMPLE_UPLOAD(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + upload = self._makeOne(_Stream()) + upload.strategy = SIMPLE_UPLOAD + self.assertEqual(upload.strategy, SIMPLE_UPLOAD) + + def test_strategy_setter_RESUMABLE_UPLOAD(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._makeOne(_Stream()) + upload.strategy = RESUMABLE_UPLOAD + self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) + + def test_total_size_setter_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + SIZE = 123 + upload = self._makeOne(_Stream) + http = object() + upload._initialize(http, _Request.URL) + with self.assertRaises(TransferInvalidError): + upload.total_size = SIZE + + def test_total_size_setter_not_initialized(self): + SIZE = 123 + upload = self._makeOne(_Stream) + upload.total_size = SIZE + self.assertEqual(upload.total_size, SIZE) + + def test__set_default_strategy_w_existing_strategy(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + config = _Dummy( + resumable_path='/resumable/endpoint', + simple_multipart=True, + simple_path='/upload/endpoint', + ) + request = _Request() + upload = self._makeOne(_Stream) + upload.strategy = RESUMABLE_UPLOAD + upload._set_default_strategy(config, request) + self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) + + def test__set_default_strategy_wo_resumable_path(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + config = _Dummy( + resumable_path=None, + simple_multipart=True, + simple_path='/upload/endpoint', + ) + request = _Request() + upload = self._makeOne(_Stream()) + upload._set_default_strategy(config, request) + self.assertEqual(upload.strategy, SIMPLE_UPLOAD) + + def test__set_default_strategy_w_total_size_gt_threshhold(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD_THRESHOLD + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + config = _UploadConfig() + request = _Request() + upload = self._makeOne( + _Stream(), total_size=RESUMABLE_UPLOAD_THRESHOLD + 1) + upload._set_default_strategy(config, request) + self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) + + def test__set_default_strategy_w_body_wo_multipart(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + config = _UploadConfig() + config.simple_multipart = False + request = _Request(body=CONTENT) + upload = self._makeOne(_Stream(), total_size=len(CONTENT)) + upload._set_default_strategy(config, request) + self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) + + def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + config = _UploadConfig() + config.simple_path = None + request = _Request(body=CONTENT) + upload = self._makeOne(_Stream(), total_size=len(CONTENT)) + upload._set_default_strategy(config, request) + self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) + + def test__set_default_strategy_w_body_w_multipart_w_simple_path(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + config = _UploadConfig() + request = _Request(body=CONTENT) + upload = self._makeOne(_Stream(), total_size=len(CONTENT)) + 
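+        # A request body plus multipart support and a simple upload path
+        # means the default strategy resolves to SIMPLE_UPLOAD.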
upload._set_default_strategy(config, request) + self.assertEqual(upload.strategy, SIMPLE_UPLOAD) + + def test_configure_request_w_total_size_gt_max_size(self): + MAX_SIZE = 1000 + config = _UploadConfig() + config.max_size = MAX_SIZE + request = _Request() + url_builder = _Dummy() + upload = self._makeOne(_Stream(), total_size=MAX_SIZE + 1) + with self.assertRaises(ValueError): + upload.configure_request(config, request, url_builder) + + def test_configure_request_w_invalid_mimetype(self): + config = _UploadConfig() + config.accept = ('text/*',) + request = _Request() + url_builder = _Dummy() + upload = self._makeOne(_Stream()) + with self.assertRaises(ValueError): + upload.configure_request(config, request, url_builder) + + def test_configure_request_w_simple_wo_body(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + CONTENT = b'CONTENT' + config = _UploadConfig() + request = _Request() + url_builder = _Dummy(query_params={}) + upload = self._makeOne(_Stream(CONTENT)) + upload.strategy = SIMPLE_UPLOAD + + upload.configure_request(config, request, url_builder) + + self.assertEqual(url_builder.query_params, {'uploadType': 'media'}) + self.assertEqual(url_builder.relative_path, config.simple_path) + + self.assertEqual(request.headers, {'content-type': self.MIME_TYPE}) + self.assertEqual(request.body, CONTENT) + self.assertEqual(request.loggable_body, '') + + def test_configure_request_w_simple_w_body(self): + from google.cloud._helpers import _to_bytes + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + CONTENT = b'CONTENT' + BODY = b'BODY' + config = _UploadConfig() + request = _Request(body=BODY) + request.headers['content-type'] = 'text/plain' + url_builder = _Dummy(query_params={}) + upload = self._makeOne(_Stream(CONTENT)) + upload.strategy = SIMPLE_UPLOAD + + upload.configure_request(config, request, url_builder) + + self.assertEqual(url_builder.query_params, {'uploadType': 'multipart'}) + self.assertEqual(url_builder.relative_path, config.simple_path) + + self.assertEqual(list(request.headers), ['content-type']) + ctype, boundary = [x.strip() + for x in request.headers['content-type'].split(';')] + self.assertEqual(ctype, 'multipart/related') + self.assertTrue(boundary.startswith('boundary="==')) + self.assertTrue(boundary.endswith('=="')) + + divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) + chunks = request.body.split(divider)[1:-1] # discard prolog / epilog + self.assertEqual(len(chunks), 2) + + parse_chunk = _email_chunk_parser() + text_msg = parse_chunk(chunks[0].strip()) + self.assertEqual(dict(text_msg._headers), + {'Content-Type': 'text/plain', + 'MIME-Version': '1.0'}) + self.assertEqual(text_msg._payload, BODY.decode('ascii')) + + app_msg = parse_chunk(chunks[1].strip()) + self.assertEqual(dict(app_msg._headers), + {'Content-Type': self.MIME_TYPE, + 'Content-Transfer-Encoding': 'binary', + 'MIME-Version': '1.0'}) + self.assertEqual(app_msg._payload, CONTENT.decode('ascii')) + self.assertTrue(b'' in request.loggable_body) + + def test_configure_request_w_resumable_wo_total_size(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'CONTENT' + config = _UploadConfig() + request = _Request() + url_builder = _Dummy(query_params={}) + upload = self._makeOne(_Stream(CONTENT)) + upload.strategy = RESUMABLE_UPLOAD + + upload.configure_request(config, request, url_builder) + + self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) + self.assertEqual(url_builder.relative_path, config.resumable_path) 
+ + self.assertEqual(request.headers, + {'X-Upload-Content-Type': self.MIME_TYPE}) + + def test_configure_request_w_resumable_w_total_size(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'CONTENT' + LEN = len(CONTENT) + config = _UploadConfig() + request = _Request() + url_builder = _Dummy(query_params={}) + upload = self._makeOne(_Stream(CONTENT)) + upload.total_size = LEN + upload.strategy = RESUMABLE_UPLOAD + + upload.configure_request(config, request, url_builder) + + self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) + self.assertEqual(url_builder.relative_path, config.resumable_path) + + self.assertEqual(request.headers, + {'X-Upload-Content-Type': self.MIME_TYPE, + 'X-Upload-Content-Length': '%d' % (LEN,)}) + + def test_refresh_upload_state_w_simple_strategy(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + upload = self._makeOne(_Stream()) + upload.strategy = SIMPLE_UPLOAD + upload.refresh_upload_state() # no-op + + def test_refresh_upload_state_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._makeOne(_Stream()) + upload.strategy = RESUMABLE_UPLOAD + with self.assertRaises(TransferInvalidError): + upload.refresh_upload_state() + + def test_refresh_upload_state_w_OK(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) + http = object() + stream = _Stream() + upload = self._makeOne(stream, total_size=LEN) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(http, _Request.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.OK, info, CONTENT) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + upload.refresh_upload_state() + + self.assertTrue(upload.complete) + self.assertEqual(upload.progress, LEN) + self.assertEqual(stream.tell(), LEN) + self.assertIs(upload._final_response, response) + + def test_refresh_upload_state_w_CREATED(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) + http = object() + stream = _Stream() + upload = self._makeOne(stream, total_size=LEN) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(http, _Request.URL) + info = {'content-range': RESP_RANGE} + response = _makeResponse(http_client.CREATED, info, CONTENT) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + upload.refresh_upload_state() + + self.assertTrue(upload.complete) + self.assertEqual(upload.progress, LEN) + self.assertEqual(stream.tell(), LEN) + self.assertIs(upload._final_response, response) + + def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + from google.cloud._testing import _Monkey + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + LAST = 5 + http = object() 
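+        # A 308 (RESUME_INCOMPLETE) response with a 'range' header marks
+        # the upload as still in progress; progress advances only to the
+        # last byte the server acknowledged.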
+ stream = _Stream() + upload = self._makeOne(stream, total_size=LEN) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(http, _Request.URL) + info = {'range': '0-%d' % (LAST - 1,)} + response = _makeResponse(RESUME_INCOMPLETE, info, CONTENT) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + upload.refresh_upload_state() + + self.assertFalse(upload.complete) + self.assertEqual(upload.progress, LAST) + self.assertEqual(stream.tell(), LAST) + self.assertIsNot(upload._final_response, response) + + def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + from google.cloud._testing import _Monkey + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + http = object() + stream = _Stream() + upload = self._makeOne(stream, total_size=LEN) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(http, _Request.URL) + response = _makeResponse(RESUME_INCOMPLETE, content=CONTENT) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + upload.refresh_upload_state() + + self.assertFalse(upload.complete) + self.assertEqual(upload.progress, 0) + self.assertEqual(stream.tell(), 0) + self.assertIsNot(upload._final_response, response) + + def test_refresh_upload_state_w_error(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.exceptions import HttpError + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + LEN = len(CONTENT) + http = object() + stream = _Stream() + upload = self._makeOne(stream, total_size=LEN) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(http, _Request.URL) + response = _makeResponse(http_client.FORBIDDEN) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + with self.assertRaises(HttpError): + upload.refresh_upload_state() + + def test__get_range_header_miss(self): + upload = self._makeOne(_Stream()) + response = _makeResponse(None) + self.assertIsNone(upload._get_range_header(response)) + + def test__get_range_header_w_Range(self): + upload = self._makeOne(_Stream()) + response = _makeResponse(None, {'Range': '123'}) + self.assertEqual(upload._get_range_header(response), '123') + + def test__get_range_header_w_range(self): + upload = self._makeOne(_Stream()) + response = _makeResponse(None, {'range': '123'}) + self.assertEqual(upload._get_range_header(response), '123') + + def test_initialize_upload_no_strategy(self): + request = _Request() + upload = self._makeOne(_Stream()) + with self.assertRaises(ValueError): + upload.initialize_upload(request, http=object()) + + def test_initialize_upload_simple_w_http(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + request = _Request() + upload = self._makeOne(_Stream()) + upload.strategy = SIMPLE_UPLOAD + upload.initialize_upload(request, http=object()) # no-op + + def test_initialize_upload_resumable_already_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + request = _Request() + upload = self._makeOne(_Stream()) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(None, self.URL) + with 
self.assertRaises(TransferInvalidError): + upload.initialize_upload(request, http=object()) + + def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.exceptions import HttpError + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + request = _Request() + upload = self._makeOne(_Stream()) + upload.strategy = RESUMABLE_UPLOAD + response = _makeResponse(http_client.FORBIDDEN) + requester = _MakeRequest(response) + + with _Monkey(MUT, make_api_request=requester): + with self.assertRaises(HttpError): + upload.initialize_upload(request, http=object()) + + def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + request = _Request() + upload = self._makeOne(_Stream(), auto_transfer=False) + upload.strategy = RESUMABLE_UPLOAD + info = {'location': self.UPLOAD_URL} + response = _makeResponse(http_client.OK, info) + requester = _MakeRequest(response) + + with _Monkey(MUT, make_api_request=requester): + upload.initialize_upload(request, http=object()) + + self.assertIsNone(upload._server_chunk_granularity) + self.assertEqual(upload.url, self.UPLOAD_URL) + self.assertEqual(requester._responses, []) + self.assertEqual(len(requester._requested), 1) + self.assertIs(requester._requested[0][0], request) + + def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + request = _Request() + upload = self._makeOne(_Stream(CONTENT), chunksize=1000) + upload.strategy = RESUMABLE_UPLOAD + info = {'X-Goog-Upload-Chunk-Granularity': '100', + 'location': self.UPLOAD_URL} + response = _makeResponse(http_client.OK, info) + chunk_response = _makeResponse(http_client.OK) + requester = _MakeRequest(response, chunk_response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + upload.initialize_upload(request, http) + + self.assertEqual(upload._server_chunk_granularity, 100) + self.assertEqual(upload.url, self.UPLOAD_URL) + self.assertEqual(requester._responses, []) + self.assertEqual(len(requester._requested), 2) + self.assertIs(requester._requested[0][0], request) + chunk_request = requester._requested[1][0] + self.assertIsInstance(chunk_request, _Request) + self.assertEqual(chunk_request.url, self.UPLOAD_URL) + self.assertEqual(chunk_request.http_method, 'PUT') + self.assertEqual(chunk_request.body, CONTENT) + + def test__last_byte(self): + upload = self._makeOne(_Stream()) + self.assertEqual(upload._last_byte('123-456'), 456) + + def test__validate_chunksize_wo__server_chunk_granularity(self): + upload = self._makeOne(_Stream()) + upload._validate_chunksize(123) # no-op + + def test__validate_chunksize_w__server_chunk_granularity_miss(self): + upload = self._makeOne(_Stream()) + upload._server_chunk_granularity = 100 + with self.assertRaises(ValueError): + upload._validate_chunksize(123) + + def test__validate_chunksize_w__server_chunk_granularity_hit(self): + upload = self._makeOne(_Stream()) + upload._server_chunk_granularity = 100 + 
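+        # 400 is an exact multiple of the 100-byte server granularity,
+        # so no ValueError is raised.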
upload._validate_chunksize(400) + + def test_stream_file_w_simple_strategy(self): + from google.cloud.streaming.transfer import SIMPLE_UPLOAD + upload = self._makeOne(_Stream()) + upload.strategy = SIMPLE_UPLOAD + with self.assertRaises(ValueError): + upload.stream_file() + + def test_stream_file_w_use_chunks_invalid_chunk_size(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._makeOne(_Stream(), chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 100 + with self.assertRaises(ValueError): + upload.stream_file(use_chunks=True) + + def test_stream_file_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._makeOne(_Stream(), chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + with self.assertRaises(TransferInvalidError): + upload.stream_file() + + def test_stream_file_already_complete_w_unseekable_stream(self): + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + http = object() + stream = object() + response = object() + upload = self._makeOne(stream, chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + self.assertIs(upload.stream_file(), response) + + def test_stream_file_already_complete_w_seekable_stream_unsynced(self): + from google.cloud.streaming.exceptions import CommunicationError + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _Stream(CONTENT) + response = object() + upload = self._makeOne(stream, chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + with self.assertRaises(CommunicationError): + upload.stream_file() + + def test_stream_file_already_complete_wo_seekable_method_synced(self): + import os + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _Stream(CONTENT) + stream.seek(0, os.SEEK_END) + response = object() + upload = self._makeOne(stream, chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + self.assertIs(upload.stream_file(use_chunks=False), response) + + def test_stream_file_already_complete_w_seekable_method_true_synced(self): + import os + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _StreamWithSeekableMethod(CONTENT, True) + stream.seek(0, os.SEEK_END) + response = object() + upload = self._makeOne(stream, chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + self.assertIs(upload.stream_file(use_chunks=False), response) + + def test_stream_file_already_complete_w_seekable_method_false(self): + import os + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _StreamWithSeekableMethod(CONTENT, False) + stream.seek(0, os.SEEK_END) + response = object() + upload = self._makeOne(stream, 
chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + self.assertIs(upload.stream_file(use_chunks=False), response) + + def test_stream_file_incomplete(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _Stream(CONTENT) + upload = self._makeOne(stream, chunksize=6) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 6 + upload._initialize(http, self.UPLOAD_URL) + + info_1 = {'content-length': '0', 'range': 'bytes=0-5'} + response_1 = _makeResponse(RESUME_INCOMPLETE, info_1) + info_2 = {'content-length': '0', 'range': 'bytes=6-9'} + response_2 = _makeResponse(http_client.OK, info_2) + requester = _MakeRequest(response_1, response_2) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + response = upload.stream_file() + + self.assertIs(response, response_2) + self.assertEqual(len(requester._responses), 0) + self.assertEqual(len(requester._requested), 2) + + request_1 = requester._requested[0][0] + self.assertEqual(request_1.url, self.UPLOAD_URL) + self.assertEqual(request_1.http_method, 'PUT') + self.assertEqual(request_1.headers, + {'Content-Range': 'bytes 0-5/*', + 'Content-Type': self.MIME_TYPE}) + self.assertEqual(request_1.body, CONTENT[:6]) + + request_2 = requester._requested[1][0] + self.assertEqual(request_2.url, self.UPLOAD_URL) + self.assertEqual(request_2.http_method, 'PUT') + self.assertEqual(request_2.headers, + {'Content-Range': 'bytes 6-9/10', + 'Content-Type': self.MIME_TYPE}) + self.assertEqual(request_2.body, CONTENT[6:]) + + def test_stream_file_incomplete_w_transfer_error(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.exceptions import CommunicationError + from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _Stream(CONTENT) + upload = self._makeOne(stream, chunksize=6) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 6 + upload._initialize(http, self.UPLOAD_URL) + + info = { + 'content-length': '0', + 'range': 'bytes=0-4', # simulate error, s.b. 
'0-5' + } + response = _makeResponse(RESUME_INCOMPLETE, info) + requester = _MakeRequest(response) + + with _Monkey(MUT, + Request=_Request, + make_api_request=requester): + with self.assertRaises(CommunicationError): + upload.stream_file() + + self.assertEqual(len(requester._responses), 0) + self.assertEqual(len(requester._requested), 1) + + request = requester._requested[0][0] + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + self.assertEqual(request.headers, + {'Content-Range': 'bytes 0-5/*', + 'Content-Type': self.MIME_TYPE}) + self.assertEqual(request.body, CONTENT[:6]) + + def test__send_media_request_wo_error(self): + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + CONTENT = b'ABCDEFGHIJ' + bytes_http = object() + stream = _Stream(CONTENT) + upload = self._makeOne(stream) + upload.bytes_http = bytes_http + + headers = {'Content-Range': 'bytes 0-9/10', + 'Content-Type': self.MIME_TYPE} + request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) + info = {'content-length': '0', 'range': 'bytes=0-4'} + response = _makeResponse(RESUME_INCOMPLETE, info) + requester = _MakeRequest(response) + + with _Monkey(MUT, make_api_request=requester): + upload._send_media_request(request, 9) + + self.assertEqual(len(requester._responses), 0) + self.assertEqual(len(requester._requested), 1) + used_request, used_http, _ = requester._requested[0] + self.assertIs(used_request, request) + self.assertIs(used_http, bytes_http) + self.assertEqual(stream.tell(), 4) + + def test__send_media_request_w_error(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud.streaming import transfer as MUT + from google.cloud.streaming.exceptions import HttpError + from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + bytes_http = object() + http = object() + stream = _Stream(CONTENT) + upload = self._makeOne(stream) + upload.strategy = RESUMABLE_UPLOAD + upload._initialize(http, self.UPLOAD_URL) + upload.bytes_http = bytes_http + + headers = {'Content-Range': 'bytes 0-9/10', + 'Content-Type': self.MIME_TYPE} + request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) + info_1 = {'content-length': '0', 'range': 'bytes=0-4'} + response_1 = _makeResponse(http_client.FORBIDDEN, info_1) + info_2 = {'Content-Length': '0', 'Range': 'bytes=0-4'} + response_2 = _makeResponse(RESUME_INCOMPLETE, info_2) + requester = _MakeRequest(response_1, response_2) + + with _Monkey(MUT, Request=_Request, make_api_request=requester): + with self.assertRaises(HttpError): + upload._send_media_request(request, 9) + + self.assertEqual(len(requester._responses), 0) + self.assertEqual(len(requester._requested), 2) + first_request, first_http, _ = requester._requested[0] + self.assertIs(first_request, request) + self.assertIs(first_http, bytes_http) + second_request, second_http, _ = requester._requested[1] + self.assertEqual(second_request.url, self.UPLOAD_URL) + self.assertEqual(second_request.http_method, 'PUT') # ACK! 
+ self.assertEqual(second_request.headers, + {'Content-Range': 'bytes */*'}) + self.assertIs(second_http, http) + + def test__send_media_body_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + upload = self._makeOne(_Stream()) + with self.assertRaises(TransferInvalidError): + upload._send_media_body(0) + + def test__send_media_body_wo_total_size(self): + from google.cloud.streaming.exceptions import TransferInvalidError + http = object() + upload = self._makeOne(_Stream()) + upload._initialize(http, _Request.URL) + with self.assertRaises(TransferInvalidError): + upload._send_media_body(0) + + def test__send_media_body_start_lt_total_size(self): + from google.cloud.streaming.stream_slice import StreamSlice + SIZE = 1234 + http = object() + stream = _Stream() + upload = self._makeOne(stream, total_size=SIZE) + upload._initialize(http, self.UPLOAD_URL) + response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer + + found = upload._send_media_body(0) + + self.assertIs(found, response) + request, end = streamer._called_with + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + body_stream = request.body + self.assertIsInstance(body_stream, StreamSlice) + self.assertIs(body_stream._stream, stream) + self.assertEqual(len(body_stream), SIZE) + self.assertEqual(request.headers, + {'content-length': '%d' % (SIZE,), # speling! + 'Content-Type': self.MIME_TYPE, + 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) + self.assertEqual(end, SIZE) + + def test__send_media_body_start_eq_total_size(self): + from google.cloud.streaming.stream_slice import StreamSlice + SIZE = 1234 + http = object() + stream = _Stream() + upload = self._makeOne(stream, total_size=SIZE) + upload._initialize(http, self.UPLOAD_URL) + response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer + + found = upload._send_media_body(SIZE) + + self.assertIs(found, response) + request, end = streamer._called_with + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + body_stream = request.body + self.assertIsInstance(body_stream, StreamSlice) + self.assertIs(body_stream._stream, stream) + self.assertEqual(len(body_stream), 0) + self.assertEqual(request.headers, + {'content-length': '0', # speling! + 'Content-Type': self.MIME_TYPE, + 'Content-Range': 'bytes */%d' % (SIZE,)}) + self.assertEqual(end, SIZE) + + def test__send_chunk_not_initialized(self): + from google.cloud.streaming.exceptions import TransferInvalidError + upload = self._makeOne(_Stream()) + with self.assertRaises(TransferInvalidError): + upload._send_chunk(0) + + def test__send_chunk_wo_total_size_stream_exhausted(self): + CONTENT = b'ABCDEFGHIJ' + SIZE = len(CONTENT) + http = object() + upload = self._makeOne(_Stream(CONTENT), chunksize=1000) + upload._initialize(http, self.UPLOAD_URL) + response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer + self.assertIsNone(upload.total_size) + + found = upload._send_chunk(0) + + self.assertIs(found, response) + self.assertEqual(upload.total_size, SIZE) + request, end = streamer._called_with + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + self.assertEqual(request.body, CONTENT) + self.assertEqual(request.headers, + {'content-length': '%d' % SIZE, # speling! 
+ 'Content-Type': self.MIME_TYPE, + 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) + self.assertEqual(end, SIZE) + + def test__send_chunk_wo_total_size_stream_not_exhausted(self): + CONTENT = b'ABCDEFGHIJ' + SIZE = len(CONTENT) + CHUNK_SIZE = SIZE - 5 + http = object() + upload = self._makeOne(_Stream(CONTENT), chunksize=CHUNK_SIZE) + upload._initialize(http, self.UPLOAD_URL) + response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer + self.assertIsNone(upload.total_size) + + found = upload._send_chunk(0) + + self.assertIs(found, response) + self.assertIsNone(upload.total_size) + request, end = streamer._called_with + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + self.assertEqual(request.body, CONTENT[:CHUNK_SIZE]) + expected_headers = { + 'content-length': '%d' % CHUNK_SIZE, # speling! + 'Content-Type': self.MIME_TYPE, + 'Content-Range': 'bytes 0-%d/*' % (CHUNK_SIZE - 1,), + } + self.assertEqual(request.headers, expected_headers) + self.assertEqual(end, CHUNK_SIZE) + + def test__send_chunk_w_total_size_stream_not_exhausted(self): + from google.cloud.streaming.stream_slice import StreamSlice + CONTENT = b'ABCDEFGHIJ' + SIZE = len(CONTENT) + CHUNK_SIZE = SIZE - 5 + http = object() + stream = _Stream(CONTENT) + upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) + upload._initialize(http, self.UPLOAD_URL) + response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer + + found = upload._send_chunk(0) + + self.assertIs(found, response) + request, end = streamer._called_with + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + body_stream = request.body + self.assertIsInstance(body_stream, StreamSlice) + self.assertIs(body_stream._stream, stream) + self.assertEqual(len(body_stream), CHUNK_SIZE) + expected_headers = { + 'content-length': '%d' % CHUNK_SIZE, # speling! + 'Content-Type': self.MIME_TYPE, + 'Content-Range': 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, SIZE), + } + self.assertEqual(request.headers, expected_headers) + self.assertEqual(end, CHUNK_SIZE) + + def test__send_chunk_w_total_size_stream_exhausted(self): + from google.cloud.streaming.stream_slice import StreamSlice + CONTENT = b'ABCDEFGHIJ' + SIZE = len(CONTENT) + CHUNK_SIZE = 1000 + http = object() + stream = _Stream(CONTENT) + upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) + upload._initialize(http, self.UPLOAD_URL) + response = object() + streamer = _MediaStreamer(response) + upload._send_media_request = streamer + + found = upload._send_chunk(SIZE) + + self.assertIs(found, response) + request, end = streamer._called_with + self.assertEqual(request.url, self.UPLOAD_URL) + self.assertEqual(request.http_method, 'PUT') + body_stream = request.body + self.assertIsInstance(body_stream, StreamSlice) + self.assertIs(body_stream._stream, stream) + self.assertEqual(len(body_stream), 0) + self.assertEqual(request.headers, + {'content-length': '0', # speling! 
+ 'Content-Type': self.MIME_TYPE, + 'Content-Range': 'bytes */%d' % (SIZE,)}) + self.assertEqual(end, SIZE) + + +def _email_chunk_parser(): + import six + if six.PY3: # pragma: NO COVER Python3 + from email.parser import BytesParser + parser = BytesParser() + return parser.parsebytes + else: + from email.parser import Parser + parser = Parser() + return parser.parsestr + + +class _Dummy(object): + def __init__(self, **kw): + self.__dict__.update(kw) + + +class _UploadConfig(object): + accept = ('*/*',) + max_size = None + resumable_path = '/resumable/endpoint' + simple_multipart = True + simple_path = '/upload/endpoint' + + +class _Stream(object): + _closed = False + + def __init__(self, to_read=b''): + import io + self._written = [] + self._to_read = io.BytesIO(to_read) + + def write(self, to_write): + self._written.append(to_write) + + def seek(self, offset, whence=0): + self._to_read.seek(offset, whence) + + def read(self, size=None): + if size is not None: + return self._to_read.read(size) + return self._to_read.read() + + def tell(self): + return self._to_read.tell() + + def close(self): + self._closed = True + + +class _StreamWithSeekableMethod(_Stream): + + def __init__(self, to_read=b'', seekable=True): + super(_StreamWithSeekableMethod, self).__init__(to_read) + self._seekable = seekable + + def seekable(self): + return self._seekable + + +class _Request(object): + __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body') + URL = 'http://example.com/api' + + def __init__(self, url=URL, http_method='GET', body='', headers=None): + self.url = url + self.http_method = http_method + self.body = self.loggable_body = body + if headers is None: + headers = {} + self.headers = headers + + +class _MakeRequest(object): + + def __init__(self, *responses): + self._responses = list(responses) + self._requested = [] + + def __call__(self, http, request, **kw): + self._requested.append((request, http, kw)) + return self._responses.pop(0) + + +def _makeResponse(status_code, info=None, content='', + request_url=_Request.URL): + if info is None: + info = {} + return _Dummy(status_code=status_code, + info=info, + content=content, + length=len(content), + request_url=request_url) + + +class _MediaStreamer(object): + + _called_with = None + + def __init__(self, response): + self._response = response + + def __call__(self, request, end): + assert self._called_with is None + self._called_with = (request, end) + return self._response + + +def _tempdir_maker(): + import contextlib + import shutil + import tempfile + + @contextlib.contextmanager + def _tempdir_mgr(): + temp_dir = tempfile.mkdtemp() + yield temp_dir + shutil.rmtree(temp_dir) + + return _tempdir_mgr + +_tempdir = _tempdir_maker() +del _tempdir_maker diff --git a/packages/google-cloud-core/unit_tests/streaming/test_util.py b/packages/google-cloud-core/unit_tests/streaming/test_util.py new file mode 100644 index 000000000000..c760a1955610 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/streaming/test_util.py @@ -0,0 +1,62 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_calculate_wait_for_retry(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from google.cloud.streaming.util import calculate_wait_for_retry + return calculate_wait_for_retry(*args, **kw) + + def test_w_negative_jitter_lt_max_wait(self): + import random + from google.cloud._testing import _Monkey + with _Monkey(random, uniform=lambda lower, upper: lower): + self.assertEqual(self._callFUT(1), 1.5) + + def test_w_positive_jitter_gt_max_wait(self): + import random + from google.cloud._testing import _Monkey + with _Monkey(random, uniform=lambda lower, upper: upper): + self.assertEqual(self._callFUT(4), 20) + + +class Test_acceptable_mime_type(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from google.cloud.streaming.util import acceptable_mime_type + return acceptable_mime_type(*args, **kw) + + def test_pattern_wo_slash(self): + with self.assertRaises(ValueError) as err: + self._callFUT(['text/*'], 'BOGUS') + self.assertEqual( + err.exception.args, + ('Invalid MIME type: "BOGUS"',)) + + def test_accept_pattern_w_semicolon(self): + with self.assertRaises(ValueError) as err: + self._callFUT(['text/*;charset=utf-8'], 'text/plain') + self.assertEqual( + err.exception.args, + ('MIME patterns with parameter unsupported: ' + '"text/*;charset=utf-8"',)) + + def test_miss(self): + self.assertFalse(self._callFUT(['image/*'], 'text/plain')) + + def test_hit(self): + self.assertTrue(self._callFUT(['text/*'], 'text/plain')) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py new file mode 100644 index 000000000000..4dd512ee9701 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -0,0 +1,1093 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import unittest + + +class Test__LocalStack(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud._helpers import _LocalStack + + return _LocalStack + + def _makeOne(self): + return self._getTargetClass()() + + def test_it(self): + batch1, batch2 = object(), object() + batches = self._makeOne() + self.assertEqual(list(batches), []) + self.assertIsNone(batches.top) + batches.push(batch1) + self.assertIs(batches.top, batch1) + batches.push(batch2) + self.assertIs(batches.top, batch2) + popped = batches.pop() + self.assertIs(popped, batch2) + self.assertIs(batches.top, batch1) + self.assertEqual(list(batches), [batch1]) + popped = batches.pop() + self.assertIsNone(batches.top) + self.assertEqual(list(batches), []) + + +class Test__UTC(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud._helpers import _UTC + return _UTC + + def _makeOne(self): + return self._getTargetClass()() + + def test_module_property(self): + from google.cloud import _helpers as MUT + klass = self._getTargetClass() + try: + import pytz + except ImportError: + self.assertIsInstance(MUT.UTC, klass) + else: + self.assertIs(MUT.UTC, pytz.UTC) # pragma: NO COVER + + def test_dst(self): + import datetime + + tz = self._makeOne() + self.assertEqual(tz.dst(None), datetime.timedelta(0)) + + def test_fromutc(self): + import datetime + + naive_epoch = datetime.datetime.utcfromtimestamp(0) + self.assertIsNone(naive_epoch.tzinfo) + tz = self._makeOne() + epoch = tz.fromutc(naive_epoch) + self.assertEqual(epoch.tzinfo, tz) + + def test_tzname(self): + tz = self._makeOne() + self.assertEqual(tz.tzname(None), 'UTC') + + def test_utcoffset(self): + import datetime + + tz = self._makeOne() + self.assertEqual(tz.utcoffset(None), datetime.timedelta(0)) + + def test___repr__(self): + tz = self._makeOne() + self.assertEqual(repr(tz), '<UTC>') + + def test___str__(self): + tz = self._makeOne() + self.assertEqual(str(tz), 'UTC') + + +class Test__ensure_tuple_or_list(unittest.TestCase): + + def _callFUT(self, arg_name, tuple_or_list): + from google.cloud._helpers import _ensure_tuple_or_list + return _ensure_tuple_or_list(arg_name, tuple_or_list) + + def test_valid_tuple(self): + valid_tuple_or_list = ('a', 'b', 'c', 'd') + result = self._callFUT('ARGNAME', valid_tuple_or_list) + self.assertEqual(result, ['a', 'b', 'c', 'd']) + + def test_valid_list(self): + valid_tuple_or_list = ['a', 'b', 'c', 'd'] + result = self._callFUT('ARGNAME', valid_tuple_or_list) + self.assertEqual(result, valid_tuple_or_list) + + def test_invalid(self): + invalid_tuple_or_list = object() + with self.assertRaises(TypeError): + self._callFUT('ARGNAME', invalid_tuple_or_list) + + def test_invalid_iterable(self): + invalid_tuple_or_list = 'FOO' + with self.assertRaises(TypeError): + self._callFUT('ARGNAME', invalid_tuple_or_list) + + +class Test__app_engine_id(unittest.TestCase): + + def _callFUT(self): + from google.cloud._helpers import _app_engine_id + return _app_engine_id() + + def test_no_value(self): + from google.cloud._testing import _Monkey + from google.cloud import _helpers + + with _Monkey(_helpers, app_identity=None): + dataset_id = self._callFUT() + self.assertIsNone(dataset_id) + + def test_value_set(self): + from google.cloud._testing import _Monkey + from google.cloud import _helpers + + APP_ENGINE_ID = object() + APP_IDENTITY = _AppIdentity(APP_ENGINE_ID) + with _Monkey(_helpers, app_identity=APP_IDENTITY): + dataset_id = self._callFUT() + self.assertEqual(dataset_id, APP_ENGINE_ID) + + +class 
Test__file_project_id(unittest.TestCase): + + def _callFUT(self): + from google.cloud._helpers import _file_project_id + return _file_project_id() + + def test_success(self): + from google.cloud.environment_vars import CREDENTIALS + from google.cloud._testing import _Monkey + from google.cloud._testing import _NamedTemporaryFile + + project_id = 'test-project-id' + payload = '{"%s":"%s"}' % ('project_id', project_id) + with _NamedTemporaryFile() as temp: + with open(temp.name, 'w') as creds_file: + creds_file.write(payload) + + environ = {CREDENTIALS: temp.name} + with _Monkey(os, getenv=environ.get): + result = self._callFUT() + + self.assertEqual(result, project_id) + + def test_no_environment_variable_set(self): + from google.cloud._testing import _Monkey + + environ = {} + with _Monkey(os, getenv=environ.get): + result = self._callFUT() + + self.assertIsNone(result) + + +class Test__get_nix_config_path(unittest.TestCase): + + def _callFUT(self): + from google.cloud._helpers import _get_nix_config_path + return _get_nix_config_path() + + def test_it(self): + from google.cloud import _helpers as MUT + from google.cloud._testing import _Monkey + + user_root = 'a' + config_file = 'b' + with _Monkey(MUT, _USER_ROOT=user_root, + _GCLOUD_CONFIG_FILE=config_file): + result = self._callFUT() + + expected = os.path.join(user_root, '.config', config_file) + self.assertEqual(result, expected) + + +class Test__get_windows_config_path(unittest.TestCase): + + def _callFUT(self): + from google.cloud._helpers import _get_windows_config_path + return _get_windows_config_path() + + def test_it(self): + from google.cloud import _helpers as MUT + from google.cloud._testing import _Monkey + + appdata_dir = 'a' + environ = {'APPDATA': appdata_dir} + config_file = 'b' + with _Monkey(os, getenv=environ.get): + with _Monkey(MUT, _GCLOUD_CONFIG_FILE=config_file): + result = self._callFUT() + + expected = os.path.join(appdata_dir, config_file) + self.assertEqual(result, expected) + + +class Test__default_service_project_id(unittest.TestCase): + + CONFIG_TEMPLATE = '[%s]\n%s = %s\n' + + def _callFUT(self): + from google.cloud._helpers import _default_service_project_id + return _default_service_project_id() + + def test_nix(self): + from google.cloud import _helpers as MUT + from google.cloud._testing import _Monkey + from google.cloud._testing import _NamedTemporaryFile + + project_id = 'test-project-id' + with _NamedTemporaryFile() as temp: + config_value = self.CONFIG_TEMPLATE % ( + MUT._GCLOUD_CONFIG_SECTION, + MUT._GCLOUD_CONFIG_KEY, project_id) + with open(temp.name, 'w') as config_file: + config_file.write(config_value) + + def mock_get_path(): + return temp.name + + with _Monkey(os, name='not-nt'): + with _Monkey(MUT, _get_nix_config_path=mock_get_path, + _USER_ROOT='not-None'): + result = self._callFUT() + + self.assertEqual(result, project_id) + + def test_windows(self): + from google.cloud import _helpers as MUT + from google.cloud._testing import _Monkey + from google.cloud._testing import _NamedTemporaryFile + + project_id = 'test-project-id' + with _NamedTemporaryFile() as temp: + config_value = self.CONFIG_TEMPLATE % ( + MUT._GCLOUD_CONFIG_SECTION, + MUT._GCLOUD_CONFIG_KEY, project_id) + with open(temp.name, 'w') as config_file: + config_file.write(config_value) + + def mock_get_path(): + return temp.name + + with _Monkey(os, name='nt'): + with _Monkey(MUT, _get_windows_config_path=mock_get_path, + _USER_ROOT=None): + result = self._callFUT() + + self.assertEqual(result, project_id) + + def 
test_gae(self): + from google.cloud import _helpers as MUT + from google.cloud._testing import _Monkey + + with _Monkey(os, name='not-nt'): + with _Monkey(MUT, _USER_ROOT=None): + result = self._callFUT() + + self.assertIsNone(result) + + +class Test__compute_engine_id(unittest.TestCase): + + def _callFUT(self): + from google.cloud._helpers import _compute_engine_id + return _compute_engine_id() + + def _monkeyConnection(self, connection): + from six.moves import http_client + from google.cloud._testing import _Monkey + + def _connection_factory(host, timeout): + connection.host = host + connection.timeout = timeout + return connection + + return _Monkey(http_client, HTTPConnection=_connection_factory) + + def test_bad_status(self): + connection = _HTTPConnection(404, None) + with self._monkeyConnection(connection): + dataset_id = self._callFUT() + self.assertIsNone(dataset_id) + + def test_success(self): + COMPUTE_ENGINE_ID = object() + connection = _HTTPConnection(200, COMPUTE_ENGINE_ID) + with self._monkeyConnection(connection): + dataset_id = self._callFUT() + self.assertEqual(dataset_id, COMPUTE_ENGINE_ID) + + def test_socket_raises(self): + connection = _TimeoutHTTPConnection() + with self._monkeyConnection(connection): + dataset_id = self._callFUT() + self.assertIsNone(dataset_id) + + +class Test__get_production_project(unittest.TestCase): + + def _callFUT(self): + from google.cloud._helpers import _get_production_project + return _get_production_project() + + def test_no_value(self): + from google.cloud._testing import _Monkey + + environ = {} + with _Monkey(os, getenv=environ.get): + project = self._callFUT() + self.assertIsNone(project) + + def test_value_set(self): + from google.cloud._testing import _Monkey + from google.cloud._helpers import PROJECT + + MOCK_PROJECT = object() + environ = {PROJECT: MOCK_PROJECT} + with _Monkey(os, getenv=environ.get): + project = self._callFUT() + self.assertEqual(project, MOCK_PROJECT) + + +class Test__determine_default_project(unittest.TestCase): + + def _callFUT(self, project=None): + from google.cloud._helpers import _determine_default_project + return _determine_default_project(project=project) + + def _determine_default_helper(self, prod=None, gae=None, gce=None, + file_id=None, srv_id=None, project=None): + from google.cloud._testing import _Monkey + from google.cloud import _helpers + + _callers = [] + + def prod_mock(): + _callers.append('prod_mock') + return prod + + def file_id_mock(): + _callers.append('file_id_mock') + return file_id + + def srv_id_mock(): + _callers.append('srv_id_mock') + return srv_id + + def gae_mock(): + _callers.append('gae_mock') + return gae + + def gce_mock(): + _callers.append('gce_mock') + return gce + + patched_methods = { + '_get_production_project': prod_mock, + '_file_project_id': file_id_mock, + '_default_service_project_id': srv_id_mock, + '_app_engine_id': gae_mock, + '_compute_engine_id': gce_mock, + } + + with _Monkey(_helpers, **patched_methods): + returned_project = self._callFUT(project) + + return returned_project, _callers + + def test_no_value(self): + project, callers = self._determine_default_helper() + self.assertIsNone(project) + self.assertEqual(callers, ['prod_mock', 'file_id_mock', 'srv_id_mock', + 'gae_mock', 'gce_mock']) + + def test_explicit(self): + PROJECT = object() + project, callers = self._determine_default_helper(project=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, []) + + def test_prod(self): + PROJECT = object() + project, callers = 
self._determine_default_helper(prod=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['prod_mock']) + + def test_gae(self): + PROJECT = object() + project, callers = self._determine_default_helper(gae=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['prod_mock', 'file_id_mock', + 'srv_id_mock', 'gae_mock']) + + def test_gce(self): + PROJECT = object() + project, callers = self._determine_default_helper(gce=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['prod_mock', 'file_id_mock', 'srv_id_mock', + 'gae_mock', 'gce_mock']) + + +class Test__millis(unittest.TestCase): + + def _callFUT(self, value): + from google.cloud._helpers import _millis + return _millis(value) + + def test_one_second_from_epoch(self): + import datetime + from google.cloud._helpers import UTC + + WHEN = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=UTC) + self.assertEqual(self._callFUT(WHEN), 1000) + + +class Test__microseconds_from_datetime(unittest.TestCase): + + def _callFUT(self, value): + from google.cloud._helpers import _microseconds_from_datetime + return _microseconds_from_datetime(value) + + def test_it(self): + import datetime + + microseconds = 314159 + timestamp = datetime.datetime(1970, 1, 1, hour=0, + minute=0, second=0, + microsecond=microseconds) + result = self._callFUT(timestamp) + self.assertEqual(result, microseconds) + + +class Test__millis_from_datetime(unittest.TestCase): + + def _callFUT(self, value): + from google.cloud._helpers import _millis_from_datetime + return _millis_from_datetime(value) + + def test_w_none(self): + self.assertIsNone(self._callFUT(None)) + + def test_w_utc_datetime(self): + import datetime + import six + from google.cloud._helpers import UTC + from google.cloud._helpers import _microseconds_from_datetime + + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + NOW_MICROS = _microseconds_from_datetime(NOW) + MILLIS = NOW_MICROS // 1000 + result = self._callFUT(NOW) + self.assertIsInstance(result, six.integer_types) + self.assertEqual(result, MILLIS) + + def test_w_non_utc_datetime(self): + import datetime + import six + from google.cloud._helpers import _UTC + from google.cloud._helpers import _microseconds_from_datetime + + class CET(_UTC): + _tzname = 'CET' + _utcoffset = datetime.timedelta(hours=-1) + + zone = CET() + NOW = datetime.datetime(2015, 7, 28, 16, 34, 47, tzinfo=zone) + NOW_MICROS = _microseconds_from_datetime(NOW) + MILLIS = NOW_MICROS // 1000 + result = self._callFUT(NOW) + self.assertIsInstance(result, six.integer_types) + self.assertEqual(result, MILLIS) + + def test_w_naive_datetime(self): + import datetime + import six + from google.cloud._helpers import UTC + from google.cloud._helpers import _microseconds_from_datetime + + NOW = datetime.datetime.utcnow() + UTC_NOW = NOW.replace(tzinfo=UTC) + UTC_NOW_MICROS = _microseconds_from_datetime(UTC_NOW) + MILLIS = UTC_NOW_MICROS // 1000 + result = self._callFUT(NOW) + self.assertIsInstance(result, six.integer_types) + self.assertEqual(result, MILLIS) + + +class Test__datetime_from_microseconds(unittest.TestCase): + + def _callFUT(self, value): + from google.cloud._helpers import _datetime_from_microseconds + return _datetime_from_microseconds(value) + + def test_it(self): + import datetime + from google.cloud._helpers import UTC + from google.cloud._helpers import _microseconds_from_datetime + + NOW = datetime.datetime(2015, 7, 29, 17, 45, 21, 123456, + tzinfo=UTC) + NOW_MICROS = _microseconds_from_datetime(NOW) + 
self.assertEqual(self._callFUT(NOW_MICROS), NOW) + + +class Test___date_from_iso8601_date(unittest.TestCase): + + def _callFUT(self, value): + from google.cloud._helpers import _date_from_iso8601_date + return _date_from_iso8601_date(value) + + def test_todays_date(self): + import datetime + TODAY = datetime.date.today() + self.assertEqual(self._callFUT(TODAY.strftime("%Y-%m-%d")), TODAY) + + +class Test__rfc3339_to_datetime(unittest.TestCase): + + def _callFUT(self, dt_str): + from google.cloud._helpers import _rfc3339_to_datetime + return _rfc3339_to_datetime(dt_str) + + def test_w_bogus_zone(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_microseconds(self): + import datetime + from google.cloud._helpers import UTC + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( + year, month, day, hour, minute, seconds, micros) + result = self._callFUT(dt_str) + expected_result = datetime.datetime( + year, month, day, hour, minute, seconds, micros, UTC) + self.assertEqual(result, expected_result) + + def test_w_naonseconds(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + nanos = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%09dZ' % ( + year, month, day, hour, minute, seconds, nanos) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + +class Test__rfc3339_nanos_to_datetime(unittest.TestCase): + + def _callFUT(self, dt_str): + from google.cloud._helpers import _rfc3339_nanos_to_datetime + return _rfc3339_nanos_to_datetime(dt_str) + + def test_w_bogus_zone(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_truncated_nanos(self): + import datetime + from google.cloud._helpers import UTC + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + truncateds_and_micros = [ + ('12345678', 123456), + ('1234567', 123456), + ('123456', 123456), + ('12345', 123450), + ('1234', 123400), + ('123', 123000), + ('12', 120000), + ('1', 100000), + ] + + for truncated, micros in truncateds_and_micros: + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%sZ' % ( + year, month, day, hour, minute, seconds, truncated) + result = self._callFUT(dt_str) + expected_result = datetime.datetime( + year, month, day, hour, minute, seconds, micros, UTC) + self.assertEqual(result, expected_result) + + def test_w_naonseconds(self): + import datetime + from google.cloud._helpers import UTC + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + nanos = 123456789 + micros = nanos // 1000 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%09dZ' % ( + year, month, day, hour, minute, seconds, nanos) + result = self._callFUT(dt_str) + expected_result = datetime.datetime( + year, month, day, hour, minute, seconds, micros, UTC) + self.assertEqual(result, expected_result) + + +class Test__datetime_to_rfc3339(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud._helpers import _datetime_to_rfc3339 + return _datetime_to_rfc3339(*args, **kwargs) + + 
@staticmethod + def _make_timezone(offset): + from google.cloud._helpers import _UTC + + class CET(_UTC): + _tzname = 'CET' + _utcoffset = offset + + return CET() + + def test_w_utc_datetime(self): + import datetime + from google.cloud._helpers import UTC + + TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=UTC) + result = self._callFUT(TIMESTAMP, ignore_zone=False) + self.assertEqual(result, '2016-04-05T13:30:00.000000Z') + + def test_w_non_utc_datetime(self): + import datetime + from google.cloud._helpers import _UTC + + zone = self._make_timezone(offset=datetime.timedelta(hours=-1)) + TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone) + result = self._callFUT(TIMESTAMP, ignore_zone=False) + self.assertEqual(result, '2016-04-05T14:30:00.000000Z') + + def test_w_non_utc_datetime_and_ignore_zone(self): + import datetime + from google.cloud._helpers import _UTC + + zone = self._make_timezone(offset=datetime.timedelta(hours=-1)) + TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone) + result = self._callFUT(TIMESTAMP) + self.assertEqual(result, '2016-04-05T13:30:00.000000Z') + + def test_w_naive_datetime(self): + import datetime + + TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0) + result = self._callFUT(TIMESTAMP) + self.assertEqual(result, '2016-04-05T13:30:00.000000Z') + + +class Test__to_bytes(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud._helpers import _to_bytes + return _to_bytes(*args, **kwargs) + + def test_with_bytes(self): + value = b'bytes-val' + self.assertEqual(self._callFUT(value), value) + + def test_with_unicode(self): + value = u'string-val' + encoded_value = b'string-val' + self.assertEqual(self._callFUT(value), encoded_value) + + def test_unicode_non_ascii(self): + value = u'\u2013' # Long hyphen + encoded_value = b'\xe2\x80\x93' + self.assertRaises(UnicodeEncodeError, self._callFUT, value) + self.assertEqual(self._callFUT(value, encoding='utf-8'), + encoded_value) + + def test_with_nonstring_type(self): + value = object() + self.assertRaises(TypeError, self._callFUT, value) + + +class Test__bytes_to_unicode(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud._helpers import _bytes_to_unicode + return _bytes_to_unicode(*args, **kwargs) + + def test_with_bytes(self): + value = b'bytes-val' + encoded_value = 'bytes-val' + self.assertEqual(self._callFUT(value), encoded_value) + + def test_with_unicode(self): + value = u'string-val' + encoded_value = 'string-val' + self.assertEqual(self._callFUT(value), encoded_value) + + def test_with_nonstring_type(self): + value = object() + self.assertRaises(ValueError, self._callFUT, value) + + +class Test__pb_timestamp_to_datetime(unittest.TestCase): + + def _callFUT(self, timestamp): + from google.cloud._helpers import _pb_timestamp_to_datetime + return _pb_timestamp_to_datetime(timestamp) + + def test_it(self): + import datetime + from google.protobuf.timestamp_pb2 import Timestamp + from google.cloud._helpers import UTC + + # Epoch is midnight on January 1, 1970 ... + dt_stamp = datetime.datetime(1970, month=1, day=1, hour=0, + minute=1, second=1, microsecond=1234, + tzinfo=UTC) + # ... so 1 minute and 1 second after is 61 seconds and 1234 + # microseconds is 1234000 nanoseconds. 
+ timestamp = Timestamp(seconds=61, nanos=1234000) + self.assertEqual(self._callFUT(timestamp), dt_stamp) + + +class Test__pb_timestamp_to_rfc3339(unittest.TestCase): + + def _callFUT(self, timestamp): + from google.cloud._helpers import _pb_timestamp_to_rfc3339 + return _pb_timestamp_to_rfc3339(timestamp) + + def test_it(self): + from google.protobuf.timestamp_pb2 import Timestamp + + # Epoch is midnight on January 1, 1970 ... + # ... so 1 minute and 1 second after is 61 seconds and 1234 + # microseconds is 1234000 nanoseconds. + timestamp = Timestamp(seconds=61, nanos=1234000) + self.assertEqual(self._callFUT(timestamp), + '1970-01-01T00:01:01.001234Z') + + +class Test__datetime_to_pb_timestamp(unittest.TestCase): + + def _callFUT(self, when): + from google.cloud._helpers import _datetime_to_pb_timestamp + return _datetime_to_pb_timestamp(when) + + def test_it(self): + import datetime + from google.protobuf.timestamp_pb2 import Timestamp + from google.cloud._helpers import UTC + + # Epoch is midnight on January 1, 1970 ... + dt_stamp = datetime.datetime(1970, month=1, day=1, hour=0, + minute=1, second=1, microsecond=1234, + tzinfo=UTC) + # ... so 1 minute and 1 second after is 61 seconds and 1234 + # microseconds is 1234000 nanoseconds. + timestamp = Timestamp(seconds=61, nanos=1234000) + self.assertEqual(self._callFUT(dt_stamp), timestamp) + + +class Test__name_from_project_path(unittest.TestCase): + + PROJECT = 'PROJECT' + THING_NAME = 'THING_NAME' + TEMPLATE = r'projects/(?P<project>\w+)/things/(?P<name>\w+)' + + def _callFUT(self, path, project, template): + from google.cloud._helpers import _name_from_project_path + return _name_from_project_path(path, project, template) + + def test_w_invalid_path_length(self): + PATH = 'projects/foo' + with self.assertRaises(ValueError): + self._callFUT(PATH, None, self.TEMPLATE) + + def test_w_invalid_path_segments(self): + PATH = 'foo/%s/bar/%s' % (self.PROJECT, self.THING_NAME) + with self.assertRaises(ValueError): + self._callFUT(PATH, self.PROJECT, self.TEMPLATE) + + def test_w_mismatched_project(self): + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + PATH = 'projects/%s/things/%s' % (PROJECT1, self.THING_NAME) + with self.assertRaises(ValueError): + self._callFUT(PATH, PROJECT2, self.TEMPLATE) + + def test_w_valid_data_w_compiled_regex(self): + import re + template = re.compile(self.TEMPLATE) + PATH = 'projects/%s/things/%s' % (self.PROJECT, self.THING_NAME) + name = self._callFUT(PATH, self.PROJECT, template) + self.assertEqual(name, self.THING_NAME) + + def test_w_project_passed_as_none(self): + PROJECT1 = 'PROJECT1' + PATH = 'projects/%s/things/%s' % (PROJECT1, self.THING_NAME) + self._callFUT(PATH, None, self.TEMPLATE) + name = self._callFUT(PATH, None, self.TEMPLATE) + self.assertEqual(name, self.THING_NAME) + + +class TestMetadataPlugin(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud._helpers import MetadataPlugin + return MetadataPlugin + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + credentials = object() + plugin = self._makeOne(credentials) + self.assertIs(plugin._credentials, credentials) + + def test___call__(self): + access_token_expected = 'FOOBARBAZ' + credentials = _Credentials(access_token=access_token_expected) + callback_args = [] + + def callback(*args): + callback_args.append(args) + + transformer = self._makeOne(credentials) + result = transformer(None, callback) + cb_headers = [ + ('authorization', 'Bearer ' + access_token_expected), + 
] + self.assertIsNone(result) + self.assertEqual(callback_args, [(cb_headers, None)]) + self.assertEqual(len(credentials._tokens), 1) + + +class Test_make_secure_stub(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud._helpers import make_secure_stub + return make_secure_stub(*args, **kwargs) + + def test_it(self): + from six.moves import http_client + from google.cloud._testing import _Monkey + from google.cloud import _helpers as MUT + + mock_result = object() + stub_inputs = [] + + SSL_CREDS = object() + METADATA_CREDS = object() + COMPOSITE_CREDS = object() + CHANNEL = object() + + class _GRPCModule(object): + + def __init__(self): + self.ssl_channel_credentials_args = None + self.metadata_call_credentials_args = None + self.composite_channel_credentials_args = None + self.secure_channel_args = None + + def ssl_channel_credentials(self, *args): + self.ssl_channel_credentials_args = args + return SSL_CREDS + + def metadata_call_credentials(self, *args, **kwargs): + self.metadata_call_credentials_args = (args, kwargs) + return METADATA_CREDS + + def composite_channel_credentials(self, *args): + self.composite_channel_credentials_args = args + return COMPOSITE_CREDS + + def secure_channel(self, *args, **kwargs): + self.secure_channel_args = (args, kwargs) + return CHANNEL + + grpc_mod = _GRPCModule() + + def mock_stub_class(channel): + stub_inputs.append(channel) + return mock_result + + metadata_plugin = object() + plugin_args = [] + + def mock_plugin(*args): + plugin_args.append(args) + return metadata_plugin + + host = 'HOST' + credentials = object() + user_agent = 'USER_AGENT' + with _Monkey(MUT, grpc=grpc_mod, + MetadataPlugin=mock_plugin): + result = self._callFUT(credentials, user_agent, + mock_stub_class, host) + + self.assertIs(result, mock_result) + self.assertEqual(stub_inputs, [CHANNEL]) + self.assertEqual(plugin_args, [(credentials,)]) + self.assertEqual(grpc_mod.ssl_channel_credentials_args, ()) + self.assertEqual(grpc_mod.metadata_call_credentials_args, + ((metadata_plugin,), {'name': 'google_creds'})) + self.assertEqual( + grpc_mod.composite_channel_credentials_args, + (SSL_CREDS, METADATA_CREDS)) + target = '%s:%d' % (host, http_client.HTTPS_PORT) + secure_args = (target, COMPOSITE_CREDS) + secure_kwargs = { + 'options': (('grpc.primary_user_agent', user_agent),) + } + self.assertEqual(grpc_mod.secure_channel_args, + (secure_args, secure_kwargs)) + + +class Test_make_insecure_stub(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud._helpers import make_insecure_stub + return make_insecure_stub(*args, **kwargs) + + def _helper(self, target, host, port=None): + from google.cloud._testing import _Monkey + from google.cloud import _helpers as MUT + + mock_result = object() + stub_inputs = [] + CHANNEL = object() + + class _GRPCModule(object): + + def insecure_channel(self, *args): + self.insecure_channel_args = args + return CHANNEL + + grpc_mod = _GRPCModule() + + def mock_stub_class(channel): + stub_inputs.append(channel) + return mock_result + + with _Monkey(MUT, grpc=grpc_mod): + result = self._callFUT(mock_stub_class, host, port=port) + + self.assertIs(result, mock_result) + self.assertEqual(stub_inputs, [CHANNEL]) + self.assertEqual(grpc_mod.insecure_channel_args, (target,)) + + def test_with_port_argument(self): + host = 'HOST' + port = 1025 + target = '%s:%d' % (host, port) + self._helper(target, host, port=port) + + def test_without_port_argument(self): + host = 'HOST:1114' + self._helper(host, host) + + 
+class _AppIdentity(object): + + def __init__(self, app_id): + self.app_id = app_id + + def get_application_id(self): + return self.app_id + + +class _HTTPResponse(object): + + def __init__(self, status, data): + self.status = status + self.data = data + + def read(self): + return self.data + + +class _BaseHTTPConnection(object): + + host = timeout = None + + def __init__(self): + self._close_count = 0 + self._called_args = [] + self._called_kwargs = [] + + def request(self, method, uri, **kwargs): + self._called_args.append((method, uri)) + self._called_kwargs.append(kwargs) + + def close(self): + self._close_count += 1 + + +class _HTTPConnection(_BaseHTTPConnection): + + def __init__(self, status, project): + super(_HTTPConnection, self).__init__() + self.status = status + self.project = project + + def getresponse(self): + return _HTTPResponse(self.status, self.project) + + +class _TimeoutHTTPConnection(_BaseHTTPConnection): + + def getresponse(self): + import socket + raise socket.timeout('timed out') + + +class _Credentials(object): + + def __init__(self, access_token=None): + self._access_token = access_token + self._tokens = [] + + def get_access_token(self): + from oauth2client.client import AccessTokenInfo + token = AccessTokenInfo(access_token=self._access_token, + expires_in=None) + self._tokens.append(token) + return token diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py new file mode 100644 index 000000000000..c7e4c6536e1b --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -0,0 +1,228 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +class Test_ClientFactoryMixin(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.client import _ClientFactoryMixin + return _ClientFactoryMixin + + def test_virtual(self): + klass = self._getTargetClass() + self.assertFalse('__init__' in klass.__dict__) + + +class TestClient(unittest.TestCase): + + def setUp(self): + KLASS = self._getTargetClass() + self.original_cnxn_class = KLASS._connection_class + KLASS._connection_class = _MockConnection + + def tearDown(self): + KLASS = self._getTargetClass() + KLASS._connection_class = self.original_cnxn_class + + def _getTargetClass(self): + from google.cloud.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + from google.cloud._testing import _Monkey + from google.cloud import client + + CREDENTIALS = object() + FUNC_CALLS = [] + + def mock_get_credentials(): + FUNC_CALLS.append('get_credentials') + return CREDENTIALS + + with _Monkey(client, get_credentials=mock_get_credentials): + client_obj = self._makeOne() + + self.assertIsInstance(client_obj.connection, _MockConnection) + self.assertIs(client_obj.connection.credentials, CREDENTIALS) + self.assertEqual(FUNC_CALLS, ['get_credentials']) + + def test_ctor_explicit(self): + CREDENTIALS = object() + HTTP = object() + client_obj = self._makeOne(credentials=CREDENTIALS, http=HTTP) + + self.assertIsInstance(client_obj.connection, _MockConnection) + self.assertIs(client_obj.connection.credentials, CREDENTIALS) + self.assertIs(client_obj.connection.http, HTTP) + + def test_from_service_account_json(self): + from google.cloud._testing import _Monkey + from google.cloud import client + + KLASS = self._getTargetClass() + MOCK_FILENAME = 'foo.path' + mock_creds = _MockServiceAccountCredentials() + with _Monkey(client, ServiceAccountCredentials=mock_creds): + client_obj = KLASS.from_service_account_json(MOCK_FILENAME) + + self.assertIs(client_obj.connection.credentials, mock_creds._result) + self.assertEqual(mock_creds.json_called, [MOCK_FILENAME]) + + def test_from_service_account_json_fail(self): + KLASS = self._getTargetClass() + CREDENTIALS = object() + self.assertRaises(TypeError, KLASS.from_service_account_json, None, + credentials=CREDENTIALS) + + def test_from_service_account_p12(self): + from google.cloud._testing import _Monkey + from google.cloud import client + + KLASS = self._getTargetClass() + CLIENT_EMAIL = 'phred@example.com' + MOCK_FILENAME = 'foo.path' + mock_creds = _MockServiceAccountCredentials() + with _Monkey(client, ServiceAccountCredentials=mock_creds): + client_obj = KLASS.from_service_account_p12(CLIENT_EMAIL, + MOCK_FILENAME) + + self.assertIs(client_obj.connection.credentials, mock_creds._result) + self.assertEqual(mock_creds.p12_called, + [(CLIENT_EMAIL, MOCK_FILENAME)]) + + def test_from_service_account_p12_fail(self): + KLASS = self._getTargetClass() + CREDENTIALS = object() + self.assertRaises(TypeError, KLASS.from_service_account_p12, None, + None, credentials=CREDENTIALS) + + +class TestJSONClient(unittest.TestCase): + + def setUp(self): + KLASS = self._getTargetClass() + self.original_cnxn_class = KLASS._connection_class + KLASS._connection_class = _MockConnection + + def tearDown(self): + KLASS = self._getTargetClass() + KLASS._connection_class = self.original_cnxn_class + + def _getTargetClass(self): + from google.cloud.client import JSONClient + return JSONClient + + def _makeOne(self, *args, **kw): + return 
self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + from google.cloud._testing import _Monkey + from google.cloud import client + + PROJECT = 'PROJECT' + CREDENTIALS = object() + FUNC_CALLS = [] + + def mock_determine_proj(project): + FUNC_CALLS.append((project, '_determine_default_project')) + return PROJECT + + def mock_get_credentials(): + FUNC_CALLS.append('get_credentials') + return CREDENTIALS + + with _Monkey(client, get_credentials=mock_get_credentials, + _determine_default_project=mock_determine_proj): + client_obj = self._makeOne() + + self.assertEqual(client_obj.project, PROJECT) + self.assertIsInstance(client_obj.connection, _MockConnection) + self.assertIs(client_obj.connection.credentials, CREDENTIALS) + self.assertEqual( + FUNC_CALLS, + [(None, '_determine_default_project'), 'get_credentials']) + + def test_ctor_missing_project(self): + from google.cloud._testing import _Monkey + from google.cloud import client + + FUNC_CALLS = [] + + def mock_determine_proj(project): + FUNC_CALLS.append((project, '_determine_default_project')) + return None + + with _Monkey(client, _determine_default_project=mock_determine_proj): + self.assertRaises(EnvironmentError, self._makeOne) + + self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) + + def test_ctor_w_invalid_project(self): + CREDENTIALS = object() + HTTP = object() + with self.assertRaises(ValueError): + self._makeOne(project=object(), credentials=CREDENTIALS, http=HTTP) + + def _explicit_ctor_helper(self, project): + import six + + CREDENTIALS = object() + HTTP = object() + + client_obj = self._makeOne(project=project, credentials=CREDENTIALS, + http=HTTP) + + if isinstance(project, six.binary_type): + self.assertEqual(client_obj.project, project.decode('utf-8')) + else: + self.assertEqual(client_obj.project, project) + self.assertIsInstance(client_obj.connection, _MockConnection) + self.assertIs(client_obj.connection.credentials, CREDENTIALS) + self.assertIs(client_obj.connection.http, HTTP) + + def test_ctor_explicit_bytes(self): + PROJECT = b'PROJECT' + self._explicit_ctor_helper(PROJECT) + + def test_ctor_explicit_unicode(self): + PROJECT = u'PROJECT' + self._explicit_ctor_helper(PROJECT) + + +class _MockConnection(object): + + def __init__(self, credentials=None, http=None): + self.credentials = credentials + self.http = http + + +class _MockServiceAccountCredentials(object): + + def __init__(self): + self.p12_called = [] + self.json_called = [] + self._result = object() + + def from_p12_keyfile(self, email, path): + self.p12_called.append((email, path)) + return self._result + + def from_json_keyfile_name(self, path): + self.json_called.append(path) + return self._result diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test_connection.py new file mode 100644 index 000000000000..f63101f756fa --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_connection.py @@ -0,0 +1,390 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestConnection(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + conn = self._makeOne() + self.assertIsNone(conn.credentials) + + def test_ctor_explicit(self): + credentials = _Credentials() + self.assertEqual(credentials._create_scoped_calls, 0) + conn = self._makeOne(credentials) + self.assertEqual(credentials._create_scoped_calls, 1) + self.assertIs(conn.credentials, credentials) + self.assertIsNone(conn._http) + + def test_ctor_explicit_http(self): + http = object() + conn = self._makeOne(http=http) + self.assertIsNone(conn.credentials) + self.assertIs(conn.http, http) + + def test_ctor_credentials_wo_create_scoped(self): + credentials = object() + conn = self._makeOne(credentials) + self.assertIs(conn.credentials, credentials) + self.assertIsNone(conn._http) + + def test_http_w_existing(self): + conn = self._makeOne() + conn._http = http = object() + self.assertIs(conn.http, http) + + def test_http_wo_creds(self): + import httplib2 + conn = self._makeOne() + self.assertIsInstance(conn.http, httplib2.Http) + + def test_http_w_creds(self): + import httplib2 + + authorized = object() + credentials = _Credentials(authorized) + conn = self._makeOne(credentials) + self.assertIs(conn.http, authorized) + self.assertIsInstance(credentials._called_with, httplib2.Http) + + def test_user_agent_format(self): + from pkg_resources import get_distribution + expected_ua = 'gcloud-python/{0}'.format( + get_distribution('google-cloud-core').version) + conn = self._makeOne() + self.assertEqual(conn.USER_AGENT, expected_ua) + + +class TestJSONConnection(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.connection import JSONConnection + return JSONConnection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeMockOne(self, *args, **kw): + class MockConnection(self._getTargetClass()): + API_URL_TEMPLATE = '{api_base_url}/mock/{api_version}{path}' + API_BASE_URL = 'http://mock' + API_VERSION = 'vMOCK' + return MockConnection(*args, **kw) + + def test_class_defaults(self): + klass = self._getTargetClass() + self.assertIsNone(klass.API_URL_TEMPLATE) + self.assertIsNone(klass.API_BASE_URL) + self.assertIsNone(klass.API_VERSION) + + def test_ctor_defaults(self): + conn = self._makeOne() + self.assertIsNone(conn.credentials) + + def test_ctor_explicit(self): + credentials = _Credentials() + conn = self._makeOne(credentials) + self.assertIs(conn.credentials, credentials) + + def test_http_w_existing(self): + conn = self._makeOne() + conn._http = http = object() + self.assertIs(conn.http, http) + + def test_http_wo_creds(self): + import httplib2 + conn = self._makeOne() + self.assertIsInstance(conn.http, httplib2.Http) + + def test_http_w_creds(self): + import httplib2 + + authorized = object() + credentials = _Credentials(authorized) + conn = self._makeOne(credentials) + self.assertIs(conn.http, authorized) + self.assertIsInstance(credentials._called_with, httplib2.Http) + + def test_build_api_url_no_extra_query_params(self): + conn = self._makeMockOne() + # Intended to emulate self.mock_template + URI = '/'.join([ + conn.API_BASE_URL, + 'mock', + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(conn.build_api_url('/foo'), URI) + + 
def test_build_api_url_w_extra_query_params(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + conn = self._makeMockOne() + uri = conn.build_api_url('/foo', {'bar': 'baz'}) + + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + # Intended to emulate mock_template + PATH = '/'.join([ + '', + 'mock', + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(path, PATH) + parms = dict(parse_qsl(qs)) + self.assertEqual(parms['bar'], 'baz') + + def test__make_request_no_data_no_content_type_no_headers(self): + conn = self._makeOne() + URI = 'http://example.com/test' + http = conn._http = _Http( + {'status': '200', 'content-type': 'text/plain'}, + b'', + ) + headers, content = conn._make_request('GET', URI) + self.assertEqual(headers['status'], '200') + self.assertEqual(headers['content-type'], 'text/plain') + self.assertEqual(content, b'') + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + def test__make_request_w_data_no_extra_headers(self): + conn = self._makeOne() + URI = 'http://example.com/test' + http = conn._http = _Http( + {'status': '200', 'content-type': 'text/plain'}, + b'', + ) + conn._make_request('GET', URI, {}, 'application/json') + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + self.assertEqual(http._called_with['body'], {}) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'Content-Type': 'application/json', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + def test__make_request_w_extra_headers(self): + conn = self._makeOne() + URI = 'http://example.com/test' + http = conn._http = _Http( + {'status': '200', 'content-type': 'text/plain'}, + b'', + ) + conn._make_request('GET', URI, headers={'X-Foo': 'foo'}) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'X-Foo': 'foo', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + def test_api_request_defaults(self): + PATH = '/path/required' + conn = self._makeMockOne() + # Intended to emulate self.mock_template + URI = '/'.join([ + conn.API_BASE_URL, + 'mock', + '%s%s' % (conn.API_VERSION, PATH), + ]) + http = conn._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + self.assertEqual(conn.api_request('GET', PATH), {}) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + def test_api_request_w_non_json_response(self): + conn = self._makeMockOne() + conn._http = _Http( + {'status': '200', 'content-type': 'text/plain'}, + b'CONTENT', + ) + + self.assertRaises(TypeError, conn.api_request, 'GET', '/') + + def test_api_request_wo_json_expected(self): + conn = 
self._makeMockOne() + conn._http = _Http( + {'status': '200', 'content-type': 'text/plain'}, + b'CONTENT', + ) + self.assertEqual(conn.api_request('GET', '/', expect_json=False), + b'CONTENT') + + def test_api_request_w_query_params(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + conn = self._makeMockOne() + http = conn._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) + self.assertEqual(http._called_with['method'], 'GET') + uri = http._called_with['uri'] + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + # Intended to emulate self.mock_template + PATH = '/'.join([ + '', + 'mock', + conn.API_VERSION, + '', + ]) + self.assertEqual(path, PATH) + parms = dict(parse_qsl(qs)) + self.assertEqual(parms['foo'], 'bar') + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + def test_api_request_w_data(self): + import json + DATA = {'foo': 'bar'} + DATAJ = json.dumps(DATA) + conn = self._makeMockOne() + # Intended to emulate self.mock_template + URI = '/'.join([ + conn.API_BASE_URL, + 'mock', + conn.API_VERSION, + '', + ]) + http = conn._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + self.assertEqual(conn.api_request('POST', '/', data=DATA), {}) + self.assertEqual(http._called_with['method'], 'POST') + self.assertEqual(http._called_with['uri'], URI) + self.assertEqual(http._called_with['body'], DATAJ) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': str(len(DATAJ)), + 'Content-Type': 'application/json', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + def test_api_request_w_404(self): + from google.cloud.exceptions import NotFound + conn = self._makeMockOne() + conn._http = _Http( + {'status': '404', 'content-type': 'text/plain'}, + b'{}' + ) + self.assertRaises(NotFound, conn.api_request, 'GET', '/') + + def test_api_request_w_500(self): + from google.cloud.exceptions import InternalServerError + conn = self._makeMockOne() + conn._http = _Http( + {'status': '500', 'content-type': 'text/plain'}, + b'{}', + ) + self.assertRaises(InternalServerError, conn.api_request, 'GET', '/') + + def test_api_request_non_binary_response(self): + conn = self._makeMockOne() + http = conn._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + u'{}', + ) + result = conn.api_request('GET', '/') + # Intended to emulate self.mock_template + URI = '/'.join([ + conn.API_BASE_URL, + 'mock', + conn.API_VERSION, + '', + ]) + self.assertEqual(result, {}) + self.assertEqual(http._called_with['method'], 'GET') + self.assertEqual(http._called_with['uri'], URI) + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) + + +class _Http(object): + + _called_with = None + + def __init__(self, headers, content): + from httplib2 import Response + self._response = Response(headers) + self._content = content + + def request(self, **kw): + self._called_with = kw + return self._response, self._content + + +class _Credentials(object): + + 
_scopes = None + + def __init__(self, authorized=None): + self._authorized = authorized + self._create_scoped_calls = 0 + + def authorize(self, http): + self._called_with = http + return self._authorized + + def create_scoped_required(self): + self._create_scoped_calls += 1 + return False diff --git a/packages/google-cloud-core/unit_tests/test_credentials.py b/packages/google-cloud-core/unit_tests/test_credentials.py new file mode 100644 index 000000000000..9fc10dcca4e8 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_credentials.py @@ -0,0 +1,256 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_get_credentials(unittest.TestCase): + + def _callFUT(self): + from google.cloud import credentials + return credentials.get_credentials() + + def test_it(self): + from google.cloud._testing import _Monkey + from google.cloud import credentials as MUT + + client = _Client() + with _Monkey(MUT, client=client): + found = self._callFUT() + self.assertIsInstance(found, _Credentials) + self.assertIs(found, client._signed) + self.assertTrue(client._get_app_default_called) + + +class Test_generate_signed_url(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud.credentials import generate_signed_url + return generate_signed_url(*args, **kwargs) + + def _generate_helper(self, response_type=None, response_disposition=None, + generation=None): + import base64 + from six.moves.urllib.parse import parse_qs + from six.moves.urllib.parse import urlsplit + from google.cloud._testing import _Monkey + from google.cloud import credentials as MUT + + ENDPOINT = 'http://api.example.com' + RESOURCE = '/name/path' + SIGNED = base64.b64encode(b'DEADBEEF') + CREDENTIALS = _Credentials() + + def _get_signed_query_params(*args): + credentials, expiration = args[:2] + return { + 'GoogleAccessId': credentials.service_account_email, + 'Expires': str(expiration), + 'Signature': SIGNED, + } + + with _Monkey(MUT, _get_signed_query_params=_get_signed_query_params): + url = self._callFUT(CREDENTIALS, RESOURCE, 1000, + api_access_endpoint=ENDPOINT, + response_type=response_type, + response_disposition=response_disposition, + generation=generation) + + scheme, netloc, path, qs, frag = urlsplit(url) + self.assertEqual(scheme, 'http') + self.assertEqual(netloc, 'api.example.com') + self.assertEqual(path, RESOURCE) + params = parse_qs(qs) + # In Py3k, parse_qs gives us text values: + self.assertEqual(params.pop('Signature'), [SIGNED.decode('ascii')]) + self.assertEqual(params.pop('Expires'), ['1000']) + self.assertEqual(params.pop('GoogleAccessId'), + [CREDENTIALS.service_account_email]) + if response_type is not None: + self.assertEqual(params.pop('response-content-type'), + [response_type]) + if response_disposition is not None: + self.assertEqual(params.pop('response-content-disposition'), + [response_disposition]) + if generation is not None: + self.assertEqual(params.pop('generation'), [generation]) + # Make sure we have checked 
them all. + self.assertEqual(len(params), 0) + self.assertEqual(frag, '') + + def test_w_expiration_int(self): + self._generate_helper() + + def test_w_custom_fields(self): + response_type = 'text/plain' + response_disposition = 'attachment; filename=blob.png' + generation = '123' + self._generate_helper(response_type=response_type, + response_disposition=response_disposition, + generation=generation) + + +class Test_generate_signed_url_exception(unittest.TestCase): + def test_with_google_credentials(self): + import time + from google.cloud.credentials import generate_signed_url + RESOURCE = '/name/path' + + credentials = _GoogleCredentials() + expiration = int(time.time() + 5) + self.assertRaises(AttributeError, generate_signed_url, credentials, + resource=RESOURCE, expiration=expiration) + + +class Test__get_signed_query_params(unittest.TestCase): + + def _callFUT(self, credentials, expiration, string_to_sign): + from google.cloud.credentials import _get_signed_query_params + return _get_signed_query_params(credentials, expiration, + string_to_sign) + + def test_it(self): + import base64 + + SIG_BYTES = b'DEADBEEF' + ACCOUNT_NAME = object() + CREDENTIALS = _Credentials(sign_result=SIG_BYTES, + service_account_email=ACCOUNT_NAME) + EXPIRATION = 100 + STRING_TO_SIGN = 'dummy_signature' + result = self._callFUT(CREDENTIALS, EXPIRATION, + STRING_TO_SIGN) + + self.assertEqual(result, { + 'GoogleAccessId': ACCOUNT_NAME, + 'Expires': str(EXPIRATION), + 'Signature': base64.b64encode(b'DEADBEEF'), + }) + self.assertEqual(CREDENTIALS._signed, [STRING_TO_SIGN]) + + +class Test__get_expiration_seconds(unittest.TestCase): + + def _callFUT(self, expiration): + from google.cloud.credentials import _get_expiration_seconds + return _get_expiration_seconds(expiration) + + def _utc_seconds(self, when): + import calendar + return int(calendar.timegm(when.timetuple())) + + def test_w_invalid(self): + self.assertRaises(TypeError, self._callFUT, object()) + self.assertRaises(TypeError, self._callFUT, None) + + def test_w_int(self): + self.assertEqual(self._callFUT(123), 123) + + def test_w_long(self): + try: + long + except NameError: # pragma: NO COVER Py3K + pass + else: + self.assertEqual(self._callFUT(long(123)), 123) + + def test_w_naive_datetime(self): + import datetime + + expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(expiration_no_tz) + self.assertEqual(self._callFUT(expiration_no_tz), utc_seconds) + + def test_w_utc_datetime(self): + import datetime + from google.cloud._helpers import UTC + + expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) + utc_seconds = self._utc_seconds(expiration_utc) + self.assertEqual(self._callFUT(expiration_utc), utc_seconds) + + def test_w_other_zone_datetime(self): + import datetime + from google.cloud._helpers import _UTC + + class CET(_UTC): + _tzname = 'CET' + _utcoffset = datetime.timedelta(hours=1) + + zone = CET() + expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) + utc_seconds = self._utc_seconds(expiration_other) + cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC + self.assertEqual(self._callFUT(expiration_other), cet_seconds) + + def test_w_timedelta_seconds(self): + import datetime + from google.cloud._testing import _Monkey + from google.cloud import credentials as MUT + + dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(dummy_utcnow) + expiration_as_delta = datetime.timedelta(seconds=10) + + with _Monkey(MUT, 
_NOW=lambda: dummy_utcnow): + result = self._callFUT(expiration_as_delta) + + self.assertEqual(result, utc_seconds + 10) + + def test_w_timedelta_days(self): + import datetime + from google.cloud._testing import _Monkey + from google.cloud import credentials as MUT + + dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(dummy_utcnow) + expiration_as_delta = datetime.timedelta(days=1) + + with _Monkey(MUT, _NOW=lambda: dummy_utcnow): + result = self._callFUT(expiration_as_delta) + + self.assertEqual(result, utc_seconds + 86400) + + +class _Credentials(object): + + def __init__(self, service_account_email='testing@example.com', + sign_result=''): + self.service_account_email = service_account_email + self._sign_result = sign_result + self._signed = [] + + def sign_blob(self, bytes_to_sign): + self._signed.append(bytes_to_sign) + return None, self._sign_result + + +class _GoogleCredentials(object): + + def __init__(self, service_account_email='testing@example.com'): + self.service_account_email = service_account_email + + +class _Client(object): + + def __init__(self): + self._signed = _Credentials() + + class GoogleCredentials(object): + @staticmethod + def get_application_default(): + self._get_app_default_called = True + return self._signed + + self.GoogleCredentials = GoogleCredentials diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py new file mode 100644 index 000000000000..56d8581be036 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -0,0 +1,131 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
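# --------------------------------------------------------------------------
# Editor's note, not part of the patch: these unit-test modules repeatedly
# use google.cloud._testing._Monkey to swap module attributes for the
# duration of a with block (e.g. _Monkey(MUT, _NOW=lambda: dummy_utcnow) in
# the credentials tests above).  The class below is a minimal stand-in
# sketching that behavior; it is an assumption for illustration, not the
# real helper.


class _MonkeySketch(object):
    """Temporarily replace attributes on a module, restoring them on exit."""

    def __init__(self, module, **attrs):
        self._module = module
        self._attrs = attrs
        self._original = {}

    def __enter__(self):
        for name, new_value in self._attrs.items():
            self._original[name] = getattr(self._module, name)
            setattr(self._module, name, new_value)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        for name, old_value in self._original.items():
            setattr(self._module, name, old_value)
# --------------------------------------------------------------------------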
+ +import unittest + + +class Test_GoogleCloudError(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.exceptions import GoogleCloudError + return GoogleCloudError + + def _makeOne(self, message, errors=()): + return self._getTargetClass()(message, errors=errors) + + def test_ctor_defaults(self): + e = self._makeOne('Testing') + e.code = 600 + self.assertEqual(str(e), '600 Testing') + self.assertEqual(e.message, 'Testing') + self.assertEqual(list(e.errors), []) + + def test_ctor_explicit(self): + ERROR = { + 'domain': 'global', + 'location': 'test', + 'locationType': 'testing', + 'message': 'Testing', + 'reason': 'test', + } + e = self._makeOne('Testing', [ERROR]) + e.code = 600 + self.assertEqual(str(e), '600 Testing') + self.assertEqual(e.message, 'Testing') + self.assertEqual(list(e.errors), [ERROR]) + + +class Test_make_exception(unittest.TestCase): + + def _callFUT(self, response, content, error_info=None, use_json=True): + from google.cloud.exceptions import make_exception + return make_exception(response, content, error_info=error_info, + use_json=use_json) + + def test_hit_w_content_as_str(self): + from google.cloud.exceptions import NotFound + response = _Response(404) + content = b'{"error": {"message": "Not Found"}}' + exception = self._callFUT(response, content) + self.assertIsInstance(exception, NotFound) + self.assertEqual(exception.message, 'Not Found') + self.assertEqual(list(exception.errors), []) + + def test_hit_w_content_as_unicode(self): + import six + from google.cloud._helpers import _to_bytes + from google.cloud.exceptions import NotFound + error_message = u'That\u2019s not found.' + expected = u'404 %s' % (error_message,) + + response = _Response(404) + content = u'{"error": {"message": "%s" }}' % (error_message,) + + exception = self._callFUT(response, content) + if six.PY2: + self.assertEqual(str(exception), + _to_bytes(expected, encoding='utf-8')) + else: # pragma: NO COVER + self.assertEqual(str(exception), expected) + + self.assertIsInstance(exception, NotFound) + self.assertEqual(exception.message, error_message) + self.assertEqual(list(exception.errors), []) + + def test_hit_w_content_as_unicode_as_py3(self): + import six + from google.cloud._testing import _Monkey + from google.cloud.exceptions import NotFound + error_message = u'That is not found.' 
+ expected = u'404 %s' % (error_message,) + + with _Monkey(six, PY2=False): + response = _Response(404) + content = u'{"error": {"message": "%s" }}' % (error_message,) + exception = self._callFUT(response, content) + + self.assertIsInstance(exception, NotFound) + self.assertEqual(exception.message, error_message) + self.assertEqual(list(exception.errors), []) + self.assertEqual(str(exception), expected) + + def test_miss_w_content_as_dict(self): + from google.cloud.exceptions import GoogleCloudError + ERROR = { + 'domain': 'global', + 'location': 'test', + 'locationType': 'testing', + 'message': 'Testing', + 'reason': 'test', + } + response = _Response(600) + content = {"error": {"message": "Unknown Error", "errors": [ERROR]}} + exception = self._callFUT(response, content) + self.assertIsInstance(exception, GoogleCloudError) + self.assertEqual(exception.message, 'Unknown Error') + self.assertEqual(list(exception.errors), [ERROR]) + + def test_html_when_json_expected(self): + from google.cloud.exceptions import NotFound + response = _Response(NotFound.code) + content = '404 Not Found' + exception = self._callFUT(response, content, use_json=True) + self.assertIsInstance(exception, NotFound) + self.assertEqual(exception.message, content) + self.assertEqual(list(exception.errors), []) + + +class _Response(object): + def __init__(self, status): + self.status = status diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py new file mode 100644 index 000000000000..ec823d9ccb22 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -0,0 +1,278 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
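# --------------------------------------------------------------------------
# Editor's note, not part of the patch: the make_exception() tests in
# unit_tests/test_exceptions.py above assume roughly the following flow --
# optionally parse a JSON error body, then choose an exception class by HTTP
# status code, falling back to the base GoogleCloudError for unknown codes.
# This is an illustrative sketch only; ``registry`` and ``default`` stand in
# for module-level state that is not shown in this patch.
import json


def _make_exception_sketch(response, content, registry, default,
                           use_json=True):
    if isinstance(content, bytes):
        content = content.decode('utf-8')
    message, errors = content, ()
    if isinstance(content, dict):
        # Already-parsed payloads are used directly.
        error = content.get('error', {})
        message = error.get('message', '')
        errors = error.get('errors', ())
    elif use_json:
        try:
            error = json.loads(content).get('error', {})
        except ValueError:
            error = {}  # e.g. an HTML body when JSON was expected
        message = error.get('message', content)
        errors = error.get('errors', ())
    klass = registry.get(response.status, default)
    return klass(message, errors=errors)
# --------------------------------------------------------------------------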
+ +import unittest + + +class TestIterator(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.iterator import Iterator + return Iterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + iterator = self._makeOne(client, PATH) + self.assertIs(iterator.client, client) + self.assertEqual(iterator.path, PATH) + self.assertEqual(iterator.page_number, 0) + self.assertIsNone(iterator.next_page_token) + + def test___iter__(self): + PATH = '/foo' + KEY1 = 'key1' + KEY2 = 'key2' + ITEM1, ITEM2 = object(), object() + ITEMS = {KEY1: ITEM1, KEY2: ITEM2} + + def _get_items(response): + for item in response.get('items', []): + yield ITEMS[item['name']] + connection = _Connection({'items': [{'name': KEY1}, {'name': KEY2}]}) + client = _Client(connection) + iterator = self._makeOne(client, PATH) + iterator.get_items_from_response = _get_items + self.assertEqual(list(iterator), [ITEM1, ITEM2]) + kw, = connection._requested + self.assertEqual(kw['method'], 'GET') + self.assertEqual(kw['path'], PATH) + self.assertEqual(kw['query_params'], {}) + + def test_has_next_page_new(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + iterator = self._makeOne(client, PATH) + self.assertTrue(iterator.has_next_page()) + + def test_has_next_page_w_number_no_token(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + iterator = self._makeOne(client, PATH) + iterator.page_number = 1 + self.assertFalse(iterator.has_next_page()) + + def test_has_next_page_w_number_w_token(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + TOKEN = 'token' + iterator = self._makeOne(client, PATH) + iterator.page_number = 1 + iterator.next_page_token = TOKEN + self.assertTrue(iterator.has_next_page()) + + def test_get_query_params_no_token(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + iterator = self._makeOne(client, PATH) + self.assertEqual(iterator.get_query_params(), {}) + + def test_get_query_params_w_token(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + TOKEN = 'token' + iterator = self._makeOne(client, PATH) + iterator.next_page_token = TOKEN + self.assertEqual(iterator.get_query_params(), + {'pageToken': TOKEN}) + + def test_get_query_params_extra_params(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + extra_params = {'key': 'val'} + iterator = self._makeOne(client, PATH, extra_params=extra_params) + self.assertEqual(iterator.get_query_params(), extra_params) + + def test_get_query_params_w_token_and_extra_params(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + TOKEN = 'token' + extra_params = {'key': 'val'} + iterator = self._makeOne(client, PATH, extra_params=extra_params) + iterator.next_page_token = TOKEN + + expected_query = extra_params.copy() + expected_query.update({'pageToken': TOKEN}) + self.assertEqual(iterator.get_query_params(), expected_query) + + def test_get_query_params_w_token_collision(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + extra_params = {'pageToken': 'val'} + self.assertRaises(ValueError, self._makeOne, client, PATH, + extra_params=extra_params) + + def test_get_next_page_response_new_no_token_in_response(self): + PATH = '/foo' + TOKEN = 'token' + KEY1 = 'key1' + KEY2 = 'key2' + 
connection = _Connection({'items': [{'name': KEY1}, {'name': KEY2}], + 'nextPageToken': TOKEN}) + client = _Client(connection) + iterator = self._makeOne(client, PATH) + response = iterator.get_next_page_response() + self.assertEqual(response['items'], [{'name': KEY1}, {'name': KEY2}]) + self.assertEqual(iterator.page_number, 1) + self.assertEqual(iterator.next_page_token, TOKEN) + kw, = connection._requested + self.assertEqual(kw['method'], 'GET') + self.assertEqual(kw['path'], PATH) + self.assertEqual(kw['query_params'], {}) + + def test_get_next_page_response_no_token(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + iterator = self._makeOne(client, PATH) + iterator.page_number = 1 + self.assertRaises(RuntimeError, iterator.get_next_page_response) + + def test_reset(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + TOKEN = 'token' + iterator = self._makeOne(client, PATH) + iterator.page_number = 1 + iterator.next_page_token = TOKEN + iterator.reset() + self.assertEqual(iterator.page_number, 0) + self.assertIsNone(iterator.next_page_token) + + def test_get_items_from_response_raises_NotImplementedError(self): + PATH = '/foo' + connection = _Connection() + client = _Client(connection) + iterator = self._makeOne(client, PATH) + self.assertRaises(NotImplementedError, + iterator.get_items_from_response, object()) + + +class TestMethodIterator(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.iterator import MethodIterator + return MethodIterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + wlm = _WithListMethod() + iterator = self._makeOne(wlm.list_foo) + self.assertEqual(iterator._method, wlm.list_foo) + self.assertIsNone(iterator._token) + self.assertIsNone(iterator._page_size) + self.assertEqual(iterator._kw, {}) + self.assertIsNone(iterator._max_calls) + self.assertEqual(iterator._page_num, 0) + + def test_ctor_explicit(self): + wlm = _WithListMethod() + TOKEN = wlm._letters + SIZE = 4 + CALLS = 2 + iterator = self._makeOne(wlm.list_foo, TOKEN, SIZE, CALLS, + foo_type='Bar') + self.assertEqual(iterator._method, wlm.list_foo) + self.assertEqual(iterator._token, TOKEN) + self.assertEqual(iterator._page_size, SIZE) + self.assertEqual(iterator._kw, {'foo_type': 'Bar'}) + self.assertEqual(iterator._max_calls, CALLS) + self.assertEqual(iterator._page_num, 0) + + def test___iter___defaults(self): + import string + wlm = _WithListMethod() + iterator = self._makeOne(wlm.list_foo) + found = [] + for char in iterator: + found.append(char) + self.assertEqual(found, list(string.printable)) + self.assertEqual(len(wlm._called_with), len(found) // 10) + for i, (token, size, kw) in enumerate(wlm._called_with): + if i == 0: + self.assertIsNone(token) + else: + self.assertEqual(token, string.printable[i * 10:]) + self.assertIsNone(size) + self.assertEqual(kw, {}) + + def test___iter___explicit_size_and_maxcalls_and_kw(self): + import string + wlm = _WithListMethod() + iterator = self._makeOne(wlm.list_foo, page_size=2, max_calls=3, + foo_type='Bar') + found = [] + for char in iterator: + found.append(char) + self.assertEqual(found, list(string.printable[:2 * 3])) + self.assertEqual(len(wlm._called_with), len(found) // 2) + for i, (token, size, kw) in enumerate(wlm._called_with): + if i == 0: + self.assertIsNone(token) + else: + self.assertEqual(token, string.printable[i * 2:]) + self.assertEqual(size, 2) + self.assertEqual(kw, {'foo_type': 
'Bar'}) + + +class _WithListMethod(object): + + def __init__(self): + import string + self._called_with = [] + self._letters = string.printable + + def list_foo(self, page_token, page_size, **kw): + if page_token is not None: + assert page_token == self._letters + self._called_with.append((page_token, page_size, kw)) + if page_size is None: + page_size = 10 + page, self._letters = ( + self._letters[:page_size], self._letters[page_size:]) + token = self._letters or None + return page, token + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response + + +class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py new file mode 100644 index 000000000000..234b5d93c749 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -0,0 +1,244 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test__compute_type_url(unittest.TestCase): + + def _callFUT(self, klass, prefix=None): + from google.cloud.operation import _compute_type_url + if prefix is None: + return _compute_type_url(klass) + return _compute_type_url(klass, prefix) + + def test_wo_prefix(self): + from google.protobuf.struct_pb2 import Struct + from google.cloud.operation import _GOOGLE_APIS_PREFIX + + type_url = self._callFUT(Struct) + + self.assertEqual( + type_url, + '%s/%s' % (_GOOGLE_APIS_PREFIX, Struct.DESCRIPTOR.full_name)) + + def test_w_prefix(self): + from google.protobuf.struct_pb2 import Struct + PREFIX = 'test.google-cloud-python.com' + + type_url = self._callFUT(Struct, PREFIX) + + self.assertEqual( + type_url, + '%s/%s' % (PREFIX, Struct.DESCRIPTOR.full_name)) + + +class Test__register_type_url(unittest.TestCase): + + def _callFUT(self, type_url, klass): + from google.cloud.operation import _register_type_url + _register_type_url(type_url, klass) + + def test_simple(self): + from google.cloud import operation as MUT + from google.cloud._testing import _Monkey + TYPE_URI = 'testing.google-cloud-python.com/testing' + klass = object() + type_url_map = {} + + with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): + self._callFUT(TYPE_URI, klass) + + self.assertEqual(type_url_map, {TYPE_URI: klass}) + + def test_w_same_class(self): + from google.cloud import operation as MUT + from google.cloud._testing import _Monkey + TYPE_URI = 'testing.google-cloud-python.com/testing' + klass = object() + type_url_map = {TYPE_URI: klass} + + with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): + self._callFUT(TYPE_URI, klass) + + self.assertEqual(type_url_map, {TYPE_URI: klass}) + + def test_w_conflict(self): + from google.cloud import operation as MUT + from google.cloud._testing import _Monkey + TYPE_URI = 
'testing.google-cloud-python.com/testing' + klass, other = object(), object() + type_url_map = {TYPE_URI: other} + + with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): + with self.assertRaises(ValueError): + self._callFUT(TYPE_URI, klass) + + self.assertEqual(type_url_map, {TYPE_URI: other}) + + +class OperationTests(unittest.TestCase): + + OPERATION_NAME = 'operations/projects/foo/instances/bar/operations/123' + + def _getTargetClass(self): + from google.cloud.operation import Operation + return Operation + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + client = _Client() + operation = self._makeOne( + self.OPERATION_NAME, client) + self.assertEqual(operation.name, self.OPERATION_NAME) + self.assertIs(operation.client, client) + self.assertIsNone(operation.target) + self.assertIsNone(operation.pb_metadata) + self.assertEqual(operation.metadata, {}) + + def test_ctor_explicit(self): + client = _Client() + pb_metadata = object() + operation = self._makeOne( + self.OPERATION_NAME, client, pb_metadata, foo='bar') + self.assertEqual(operation.name, self.OPERATION_NAME) + self.assertIs(operation.client, client) + self.assertIsNone(operation.target) + self.assertIs(operation.pb_metadata, pb_metadata) + self.assertEqual(operation.metadata, {'foo': 'bar'}) + + def test_from_pb_wo_metadata_or_kw(self): + from google.longrunning import operations_pb2 + client = _Client() + operation_pb = operations_pb2.Operation(name=self.OPERATION_NAME) + klass = self._getTargetClass() + + operation = klass.from_pb(operation_pb, client) + + self.assertEqual(operation.name, self.OPERATION_NAME) + self.assertIs(operation.client, client) + self.assertIsNone(operation.pb_metadata) + self.assertEqual(operation.metadata, {}) + + def test_from_pb_w_unknown_metadata(self): + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.protobuf.struct_pb2 import Struct, Value + TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) + + client = _Client() + meta = Struct(fields={'foo': Value(string_value=u'Bar')}) + metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString()) + operation_pb = operations_pb2.Operation( + name=self.OPERATION_NAME, metadata=metadata_pb) + klass = self._getTargetClass() + + operation = klass.from_pb(operation_pb, client) + + self.assertEqual(operation.name, self.OPERATION_NAME) + self.assertIs(operation.client, client) + self.assertIsNone(operation.pb_metadata) + self.assertEqual(operation.metadata, {}) + + def test_from_pb_w_metadata_and_kwargs(self): + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.protobuf.struct_pb2 import Struct, Value + from google.cloud import operation as MUT + from google.cloud._testing import _Monkey + TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) + type_url_map = {TYPE_URI: Struct} + + client = _Client() + meta = Struct(fields={'foo': Value(string_value=u'Bar')}) + metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString()) + operation_pb = operations_pb2.Operation( + name=self.OPERATION_NAME, metadata=metadata_pb) + klass = self._getTargetClass() + + with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): + operation = klass.from_pb(operation_pb, client, baz='qux') + + self.assertEqual(operation.name, self.OPERATION_NAME) + self.assertIs(operation.client, client) + pb_metadata = operation.pb_metadata + self.assertIsInstance(pb_metadata, Struct) + 
self.assertEqual(list(pb_metadata.fields), ['foo']) + self.assertEqual(pb_metadata.fields['foo'].string_value, 'Bar') + self.assertEqual(operation.metadata, {'baz': 'qux'}) + + def test_complete_property(self): + client = _Client() + operation = self._makeOne(self.OPERATION_NAME, client) + self.assertFalse(operation.complete) + operation._complete = True + self.assertTrue(operation.complete) + with self.assertRaises(AttributeError): + operation.complete = False + + def test_poll_already_complete(self): + client = _Client() + operation = self._makeOne(self.OPERATION_NAME, client) + operation._complete = True + + with self.assertRaises(ValueError): + operation.poll() + + def test_poll_false(self): + from google.longrunning.operations_pb2 import GetOperationRequest + response_pb = _GetOperationResponse(False) + client = _Client() + stub = client._operations_stub + stub._get_operation_response = response_pb + operation = self._makeOne(self.OPERATION_NAME, client) + + self.assertFalse(operation.poll()) + + request_pb = stub._get_operation_requested + self.assertIsInstance(request_pb, GetOperationRequest) + self.assertEqual(request_pb.name, self.OPERATION_NAME) + + def test_poll_true(self): + from google.longrunning.operations_pb2 import GetOperationRequest + response_pb = _GetOperationResponse(True) + client = _Client() + stub = client._operations_stub + stub._get_operation_response = response_pb + operation = self._makeOne(self.OPERATION_NAME, client) + + self.assertTrue(operation.poll()) + + request_pb = stub._get_operation_requested + self.assertIsInstance(request_pb, GetOperationRequest) + self.assertEqual(request_pb.name, self.OPERATION_NAME) + + +class _GetOperationResponse(object): + def __init__(self, done): + self.done = done + + +class _OperationsStub(object): + + def GetOperation(self, request_pb): + self._get_operation_requested = request_pb + return self._get_operation_response + + +class _Client(object): + + def __init__(self): + self._operations_stub = _OperationsStub() From f53333e410d1cade4828080bb3b93780da486f46 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 22 Sep 2016 15:59:43 -0700 Subject: [PATCH 005/468] Adding a tox and other configuration files needed in core subpackage. The coverage RC file for the core subpackage intentionally leaves out google.cloud._testing. This is because the core tests don't use the entire functionality of the _testing module, but the umbrella package does. By leaving the module in the google-cloud-core package, every other package can depend on it and have the test helpers ready to access. 
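As an illustration of that last point (an editor's aside, not part of this
commit): a dependent package's unit tests can reuse the shared helpers simply
by importing them from the installed google-cloud-core distribution. The
import path below comes from this repository; the module and attribute being
patched are made up for the example:

    from google.cloud._testing import _Monkey

    from my_dependent_package import module_under_test  # hypothetical

    def test_with_patched_clock():
        with _Monkey(module_under_test, _NOW=lambda: 'fixed-timestamp'):
            assert module_under_test._NOW() == 'fixed-timestamp'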
--- packages/google-cloud-core/.coveragerc | 13 ++++++++++++ packages/google-cloud-core/tox.ini | 29 ++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) create mode 100644 packages/google-cloud-core/.coveragerc create mode 100644 packages/google-cloud-core/tox.ini diff --git a/packages/google-cloud-core/.coveragerc b/packages/google-cloud-core/.coveragerc new file mode 100644 index 000000000000..e72bb1216f10 --- /dev/null +++ b/packages/google-cloud-core/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +omit = + */google/cloud/_testing.py +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini new file mode 100644 index 000000000000..182562b6a42f --- /dev/null +++ b/packages/google-cloud-core/tox.ini @@ -0,0 +1,29 @@ +[tox] +envlist = + py27,py34,py35,cover + +[testing] +deps = + pytest +covercmd = + py.test --quiet \ + --cov=google.cloud \ + --cov=unit_tests \ + --cov-config {toxinidir}/.coveragerc \ + unit_tests + +[testenv] +commands = + py.test --quiet {posargs} unit_tests +deps = + {[testing]deps} + +[testenv:cover] +basepython = + python2.7 +commands = + {[testing]covercmd} +deps = + {[testenv]deps} + coverage + pytest-cov From 850884df073fabffdc967302ed6b7d975486f948 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 22 Sep 2016 16:00:36 -0700 Subject: [PATCH 006/468] Making coverage pass in core tox environment. This was necessary because some lines were only tested transitively in the umbrella package, rather than directly by the core tests. --- .../streaming/test_buffered_stream.py | 14 ++++++ .../unit_tests/test_connection.py | 44 +++++++++++++++++-- .../unit_tests/test_exceptions.py | 11 +++++ 3 files changed, 65 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py index 3304e2bd3cc0..b6f4066b11c2 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py @@ -24,6 +24,20 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) + def test_ctor_closed_stream(self): + class _Stream(object): + closed = True + + start = 0 + bufsize = 4 + bufstream = self._makeOne(_Stream, start, bufsize) + self.assertIs(bufstream._stream, _Stream) + self.assertEqual(bufstream._start_pos, start) + self.assertEqual(bufstream._buffer_pos, 0) + self.assertEqual(bufstream._buffered_data, b'') + self.assertTrue(bufstream._stream_at_end) + self.assertEqual(bufstream._end_pos, 0) + def test_ctor_start_zero_longer_than_buffer(self): from io import BytesIO CONTENT = b'CONTENT GOES HERE' diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test_connection.py index f63101f756fa..af3802b21a9f 100644 --- a/packages/google-cloud-core/unit_tests/test_connection.py +++ b/packages/google-cloud-core/unit_tests/test_connection.py @@ -74,6 +74,37 @@ def test_user_agent_format(self): conn = self._makeOne() self.assertEqual(conn.USER_AGENT, expected_ua) + def test__create_scoped_credentials_with_scoped_credentials(self): + klass = self._getTargetClass() + scoped_creds = object() + scope = 'google-specific-scope' + credentials = _Credentials(scoped=scoped_creds) + + result = 
klass._create_scoped_credentials(credentials, scope) + self.assertIs(result, scoped_creds) + self.assertEqual(credentials._create_scoped_calls, 1) + self.assertEqual(credentials._scopes, [scope]) + + def test__create_scoped_credentials_without_scope_required(self): + klass = self._getTargetClass() + credentials = _Credentials() + + result = klass._create_scoped_credentials(credentials, None) + self.assertIs(result, credentials) + self.assertEqual(credentials._create_scoped_calls, 1) + self.assertEqual(credentials._scopes, []) + + def test__create_scoped_credentials_non_scoped_credentials(self): + klass = self._getTargetClass() + credentials = object() + result = klass._create_scoped_credentials(credentials, None) + self.assertIs(result, credentials) + + def test__create_scoped_credentials_no_credentials(self): + klass = self._getTargetClass() + result = klass._create_scoped_credentials(None, None) + self.assertIsNone(result) + class TestJSONConnection(unittest.TestCase): @@ -375,11 +406,12 @@ def request(self, **kw): class _Credentials(object): - _scopes = None - - def __init__(self, authorized=None): + def __init__(self, authorized=None, scoped=None): self._authorized = authorized + self._scoped = scoped + self._scoped_required = scoped is not None self._create_scoped_calls = 0 + self._scopes = [] def authorize(self, http): self._called_with = http @@ -387,4 +419,8 @@ def authorize(self, http): def create_scoped_required(self): self._create_scoped_calls += 1 - return False + return self._scoped_required + + def create_scoped(self, scope): + self._scopes.append(scope) + return self._scoped diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index 56d8581be036..8460d6d8f1c4 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -125,6 +125,17 @@ def test_html_when_json_expected(self): self.assertEqual(exception.message, content) self.assertEqual(list(exception.errors), []) + def test_without_use_json(self): + from google.cloud.exceptions import TooManyRequests + + content = u'error-content' + response = _Response(TooManyRequests.code) + exception = self._callFUT(response, content, use_json=False) + + self.assertIsInstance(exception, TooManyRequests) + self.assertEqual(exception.message, content) + self.assertEqual(list(exception.errors), []) + class _Response(object): def __init__(self, status): From 0d7008ebacd9551c2635ec137eec804e48483f8f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 23 Sep 2016 12:17:22 -0700 Subject: [PATCH 007/468] Merge pull request #2367 from dhermes/make-core-subpackage Moving core into designated package. --- packages/google-cloud-core/.coveragerc | 6 +- packages/google-cloud-core/.travis.yml | 36 ++ packages/google-cloud-core/CONTRIBUTING.rst | 457 ++++++++++++++++++++ packages/google-cloud-core/tox.ini | 126 +++++- 4 files changed, 621 insertions(+), 4 deletions(-) create mode 100644 packages/google-cloud-core/.travis.yml create mode 100644 packages/google-cloud-core/CONTRIBUTING.rst diff --git a/packages/google-cloud-core/.coveragerc b/packages/google-cloud-core/.coveragerc index e72bb1216f10..dd1524307f5c 100644 --- a/packages/google-cloud-core/.coveragerc +++ b/packages/google-cloud-core/.coveragerc @@ -3,9 +3,9 @@ branch = True [report] omit = - */google/cloud/_testing.py -fail_under = 100 -show_missing = True + */_generated/*.py + # Packages in the "google.cloud" package that we don't own. 
+ */google/cloud/gapic/* exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-core/.travis.yml b/packages/google-cloud-core/.travis.yml new file mode 100644 index 000000000000..4e5346d52e8b --- /dev/null +++ b/packages/google-cloud-core/.travis.yml @@ -0,0 +1,36 @@ +language: python +sudo: false + +install: + - pip install --upgrade pip tox + +script: + - tox -e py27 + - (cd core && tox -e py27) + - tox -e py34 + - (cd core && tox -e py34) + - tox -e lint + - tox -e cover + - (cd core && tox -e cover) + - tox -e system-tests + - tox -e system-tests3 + - scripts/update_docs.sh + +after_success: + - tox -e coveralls + +deploy: + provider: pypi + user: gcloudpypi + password: + secure: LR0i9Oeu6kpLTYS5xK/zCng4gmdtPvFfD/XYdQhyY5jBibQkC2WUQU6nJA9bDXRxhBP5bUwXFGkbhOcOJgHNrUfmyPzpDbM8BR29KfY0WfdYv72gsGZOaekqCReFmHbqLE7qOQtHR5U3ey6ivcgw+hZO72Uu6qDCc9B8qwoBfAs= + on: + tags: true + repo: GoogleCloudPlatform/google-cloud-python + all_branches: true + # 'bdist_wheel' builds disabled until #1879 et al. are resolved. + distributions: "sdist" + +cache: + directories: + - ${HOME}/.cache/pip diff --git a/packages/google-cloud-core/CONTRIBUTING.rst b/packages/google-cloud-core/CONTRIBUTING.rst new file mode 100644 index 000000000000..8f35d5cf2930 --- /dev/null +++ b/packages/google-cloud-core/CONTRIBUTING.rst @@ -0,0 +1,457 @@ +Contributing +============================ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. + +Here are some guidelines for hacking on ``google-cloud-python``. + +Using a Development Checkout +---------------------------- + +You'll have to create a development environment to hack on ``google-cloud-python``, +using a Git checkout: + +- While logged into your GitHub account, navigate to the ``google-cloud-python`` repo + on GitHub. + + https://github.com/GoogleCloudPlatform/google-cloud-python + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the google-cloud-python + # repository into your local repository. + $ git remote add upstream https://github.com:GoogleCloudPlatform/google-cloud-python + # fetch and merge changes from upstream into master + $ git fetch upstream + $ git merge upstream/master + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``tox``, +but you can also use a ``virtualenv`` of your own creation. + +Using a custom ``virtualenv`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- To create a virtualenv in which to install ``google-cloud-python``:: + + $ cd ${HOME}/hack-on-google-cloud-python + $ virtualenv --python python2.7 ${ENV_NAME} + + You can choose which Python version you want to use by passing a ``--python`` + flag to ``virtualenv``. For example, ``virtualenv --python python2.7`` + chooses the Python 2.7 interpreter to be installed. 
+ +- From here on in within these instructions, the + ``${HOME}/hack-on-google-cloud-python/${ENV_NAME}`` virtual environment you + created above will be referred to as ``${VENV}``. To use the instructions + in the steps that follow literally, use:: + + $ export VENV=${HOME}/hack-on-google-cloud-python/${ENV_NAME} + +- To install ``google-cloud-python`` from your source checkout into + ``${VENV}``, run:: + + $ # Make sure you are in the same directory as setup.py + $ cd ${HOME}/hack-on-google-cloud-python + $ ${VENV}/bin/python setup.py install + + Unfortunately using ``setup.py develop`` is not possible with this + project, because it uses `namespace packages`_. + +Using ``tox`` +~~~~~~~~~~~~~ + +- To test your changes, run unit tests with ``tox``:: + + $ tox -e py27 + $ tox -e py34 + $ ... + +- If you'd like to poke around your code in an interpreter, let + ``tox`` install the environment of your choice:: + + $ # Install only; without running tests + $ tox -e ${ENV} --recreate --notest + + After doing this, you can activate the virtual environment and + use the interpreter from that environment:: + + $ source .tox/${ENV}/bin/activate + (ENV) $ .tox/${ENV}/bin/python + + Unfortunately, your changes to the source tree won't be picked up + by the ``tox`` environment, so if you make changes, you'll need + to again ``--recreate`` the environment. + +Note on Editable Installs / Develop Mode +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. + + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: http://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + +I'm getting weird errors... Can you help? +----------------------------------------- + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +Adding Features +--------------- + +In order to add a feature to ``google-cloud-python``: + +- The feature must be documented in both the API and narrative + documentation (in ``docs/``). + +- The feature must work fully on the following CPython versions: 2.7, + 3.4, and 3.5 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +Coding Style +------------ + +- PEP8 compliance, with exceptions defined in ``tox.ini``. + If you have ``tox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ tox -e lint + +- In order to make ``tox -e lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_REMOTE_FOR_LINT="upstream" + export GOOGLE_CLOUD_BRANCH_FOR_LINT="master" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. 
The the suggested remote name ``upstream`` + should point to the official ``GoogleCloudPlatform`` checkout and the + the branch should be the main branch on that remote (``master``). + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_callFUT`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +Running Tests +-------------- + +- To run all tests for ``google-cloud-python`` on a single Python version, run + ``py.test`` from your development virtualenv (See + *Using a Development Checkout* above). + +- To run the full set of ``google-cloud-python`` tests on all platforms, install + ``tox`` (https://testrun.org/tox/) into a system Python. The ``tox`` console + script will be installed into the scripts location for that Python. While + ``cd``'ed to the ``google-cloud-python`` checkout root directory (it contains + ``tox.ini``), invoke the ``tox`` console script. This will read the + ``tox.ini`` file and execute the tests on multiple Python versions and + platforms; while it runs, it creates a virtualenv for each version/platform + combination. For example:: + + $ sudo --set-home /usr/bin/pip install tox + $ cd ${HOME}/hack-on-google-cloud-python/ + $ /usr/bin/tox + +Running System Tests +-------------------- + +- To run system tests you can execute:: + + $ tox -e system-tests + $ tox -e system-tests3 + + or run only system tests for a particular package via:: + + $ python system_tests/run_system_test.py --package {package} + $ python3 system_tests/run_system_test.py --package {package} + + To run a subset of the system tests:: + + $ tox -e system-tests -- datastore storage + $ python system_tests/attempt_system_tests.py datastore storage + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project and + so you'll need to provide some environment variables to facilitate + authentication to your project: + + - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; + see ``system_tests/app_credentials.json.sample`` as an example. Such a file + can be downloaded directly from the developer's console by clicking + "Generate new JSON key". See private key + `docs `__ + for more details. In order for Logging system tests to work, the Service Account + will also have to be made a project Owner. This can be changed under "IAM & Admin". + - ``GOOGLE_CLOUD_TESTS_API_KEY``: The API key for your project with + the Google Translate API (and others) enabled. + +- Examples of these can be found in ``system_tests/local_test_setup.sample``. We + recommend copying this to ``system_tests/local_test_setup``, editing the + values and sourcing them into your environment:: + + $ source system_tests/local_test_setup + +- For datastore tests, you'll need to create composite + `indexes `__ + with the ``gcloud`` command line + `tool `__:: + + # Install the app (App Engine Command Line Interface) component. + $ gcloud components install app-engine-python + + # Authenticate the gcloud tool with your account. + $ GOOGLE_APPLICATION_CREDENTIALS="path/to/app_credentials.json" + $ gcloud auth activate-service-account \ + > --key-file=${GOOGLE_APPLICATION_CREDENTIALS} + + # Create the indexes + $ gcloud preview datastore create-indexes system_tests/data/index.yaml + +- For datastore query tests, you'll need stored data in your dataset. 
+ To populate this data, run:: + + $ python system_tests/populate_datastore.py + +- If you make a mistake during development (i.e. a failing test that + prevents clean-up) you can clear all system test data from your + datastore instance via:: + + $ python system_tests/clear_datastore.py + +- System tests can also be run against local `emulators`_ that mock + the production services. To run the system tests with the + ``datastore`` emulator:: + + $ tox -e datastore-emulator + $ GOOGLE_CLOUD_DISABLE_GRPC=true tox -e datastore-emulator + + This also requires that the ``gcloud`` command line tool is + installed. If you'd like to run them directly (outside of a + ``tox`` environment), first start the emulator and + take note of the process ID:: + + $ gcloud beta emulators datastore start --no-legacy 2>&1 > log.txt & + [1] 33333 + + then determine the environment variables needed to interact with + the emulator:: + + $ gcloud beta emulators datastore env-init + export DATASTORE_LOCAL_HOST=localhost:8417 + export DATASTORE_HOST=http://localhost:8417 + export DATASTORE_DATASET=google-cloud-settings-app-id + export DATASTORE_PROJECT_ID=google-cloud-settings-app-id + + using these environment variables run the emulator:: + + $ DATASTORE_HOST=http://localhost:8471 \ + > DATASTORE_DATASET=google-cloud-settings-app-id \ + > GOOGLE_CLOUD_NO_PRINT=true \ + > python system_tests/run_system_test.py \ + > --package=datastore --ignore-requirements + + and after completion stop the emulator and any child + processes it spawned:: + + $ kill -- -33333 + +.. _emulators: https://cloud.google.com/sdk/gcloud/reference/beta/emulators/ + +- To run the system tests with the ``pubsub`` emulator:: + + $ tox -e pubsub-emulator + $ GOOGLE_CLOUD_DISABLE_GRPC=true tox -e pubsub-emulator + + If you'd like to run them directly (outside of a ``tox`` environment), first + start the emulator and take note of the process ID:: + + $ gcloud beta emulators pubsub start 2>&1 > log.txt & + [1] 44444 + + then determine the environment variables needed to interact with + the emulator:: + + $ gcloud beta emulators pubsub env-init + export PUBSUB_EMULATOR_HOST=localhost:8897 + + using these environment variables run the emulator:: + + $ PUBSUB_EMULATOR_HOST=localhost:8897 \ + > python system_tests/run_system_test.py \ + > --package=pubsub + + and after completion stop the emulator and any child + processes it spawned:: + + $ kill -- -44444 + +Test Coverage +------------- + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``tox -e cover``. + +Documentation Coverage and Building HTML Documentation +------------------------------------------------------ + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +To build and review docs (where ``${VENV}`` refers to the virtualenv you're +using to develop ``google-cloud-python``): + +1. After following the steps above in "Using a Development Checkout", install + Sphinx and all development requirements in your virtualenv:: + + $ cd ${HOME}/hack-on-google-cloud-python + $ ${VENV}/bin/pip install Sphinx + +2. 
Change into the ``docs`` directory within your ``google-cloud-python`` checkout and + execute the ``make`` command with some flags:: + + $ cd ${HOME}/hack-on-google-cloud-python/google-cloud-python/docs + $ make clean html SPHINXBUILD=${VENV}/bin/sphinx-build + + The ``SPHINXBUILD=...`` argument tells Sphinx to use the virtualenv Python, + which will have both Sphinx and ``google-cloud-python`` (for API documentation + generation) installed. + +3. Open the ``docs/_build/html/index.html`` file to see the resulting HTML + rendering. + +As an alternative to 1. and 2. above, if you have ``tox`` installed, you +can build the docs via:: + + $ tox -e docs + +Note About ``README`` as it pertains to PyPI +-------------------------------------------- + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.python.org/pypi/google-cloud + +Travis Configuration and Build Optimizations +-------------------------------------------- + +All build scripts in the ``.travis.yml`` configuration file which have +Python dependencies are specified in the ``tox.ini`` configuration. +They are executed in the Travis build via ``tox -e ${ENV}`` where +``${ENV}`` is the environment being tested. + +If new ``tox`` environments are added to be run in a Travis build, they +should be listed in ``[tox].envlist`` as a default environment. + +We speed up builds by using the Travis `caching feature`_. + +.. _caching feature: https://docs.travis-ci.com/user/caching/#pip-cache + +We intentionally **do not** cache the ``.tox/`` directory. Instead, we +allow the ``tox`` environments to be re-built for every build. This +way, we'll always get the latest versions of our dependencies and any +caching or wheel optimization to be done will be handled automatically +by ``pip``. + +Supported Python Versions +------------------------- + +We support: + +- `Python 2.7`_ +- `Python 3.4`_ +- `Python 3.5`_ + +.. _Python 2.7: https://docs.python.org/2.7/ +.. _Python 3.4: https://docs.python.org/3.4/ +.. _Python 3.5: https://docs.python.org/3.5/ + +Supported versions can be found in our ``tox.ini`` `config`_. + +.. _config: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/tox.ini + +We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ +and lack of continuous integration `support`_. + +.. _Python 2.5: https://docs.python.org/2.5/ +.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ +.. _support: http://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ + +We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no +longer supported by the core development team. + +We also explicitly decided to support Python 3 beginning with version +3.4. Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. 
_Unicode literal support: https://www.python.org/dev/peps/pep-0414/ +.. _dropped 2.6: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/995 + +Versioning +---------- + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +It is currently in major version zero (``0.y.z``), which means that anything +may change at any time and the public API should not be considered +stable. + +Contributor License Agreements +------------------------------ + +Before we can accept your pull requests you'll need to sign a Contributor License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the intellectual property**, then you'll need to sign an `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, then you'll need to sign a `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, we'll be able to accept your pull requests. diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini index 182562b6a42f..1e2eba979c5f 100644 --- a/packages/google-cloud-core/tox.ini +++ b/packages/google-cloud-core/tox.ini @@ -1,9 +1,10 @@ [tox] envlist = - py27,py34,py35,cover + py27,py34,py35,cover,docs,lint [testing] deps = + {toxinidir}/core pytest covercmd = py.test --quiet \ @@ -11,6 +12,13 @@ covercmd = --cov=unit_tests \ --cov-config {toxinidir}/.coveragerc \ unit_tests + py.test --quiet \ + --cov=google.cloud \ + --cov=unit_tests \ + --cov-append \ + --cov-config {toxinidir}/.coveragerc \ + core/unit_tests + coverage report --show-missing --fail-under=100 [testenv] commands = @@ -18,6 +26,13 @@ commands = deps = {[testing]deps} +[testenv:py27-pandas] +basepython = + python2.7 +deps = + {[testenv]deps} + pandas + [testenv:cover] basepython = python2.7 @@ -27,3 +42,112 @@ deps = {[testenv]deps} coverage pytest-cov + +[testenv:coveralls] +basepython = {[testenv:cover]basepython} +commands = + {[testing]covercmd} + coveralls +ignore_errors = True +deps = + {[testenv:cover]deps} + coveralls +passenv = {[testenv:system-tests]passenv} + +[testenv:json-docs] +basepython = + python2.7 +commands = + python -c \ + "import shutil; shutil.rmtree('docs/_build/json', ignore_errors=True)" + {toxinidir}/scripts/update_json_docs.sh +deps = + parinx + pdoc + Sphinx +passenv = + TRAVIS_TAG + TRAVIS_BRANCH + TRAVIS_PULL_REQUEST + GH_OWNER + GH_OAUTH_TOKEN + GH_PROJECT_NAME + +[testenv:docs] +basepython = + python2.7 +commands = + python -c \ + "import shutil; shutil.rmtree('docs/_build', ignore_errors=True)" + sphinx-build -W -b html -d docs/_build/doctrees docs docs/_build/html + python {toxinidir}/scripts/verify_included_modules.py --build-root _build +deps = + {[testenv]deps} + Sphinx + sphinx_rtd_theme +passenv = {[testenv:system-tests]passenv} SPHINX_RELEASE READTHEDOCS + +[pep8] +exclude = + docs/conf.py, + google/cloud/bigtable/_generated*/*, + google/cloud/datastore/_generated/* +verbose = 1 + +[testenv:lint] +basepython = + python2.7 +commands = + python {toxinidir}/scripts/pycodestyle_on_repo.py + python {toxinidir}/scripts/run_pylint.py +deps = + {[testenv]deps} + pycodestyle + pylint >= 1.6.4 +passenv = {[testenv:system-tests]passenv} + +[testenv:system-tests] +basepython = + python2.7 +commands = + python {toxinidir}/system_tests/attempt_system_tests.py {posargs} +passenv = GOOGLE_* GOOGLE_CLOUD_* TRAVIS* encrypted_* + +[testenv:system-tests3] +basepython = + python3.4 +commands = + python 
{toxinidir}/system_tests/attempt_system_tests.py {posargs} +passenv = {[testenv:system-tests]passenv} + +[emulator] +deps = + {[testenv]deps} + psutil +setenv = + GOOGLE_CLOUD_NO_PRINT=true +passenv = + GOOGLE_CLOUD_DISABLE_GRPC +emulatorcmd = + python {toxinidir}/system_tests/run_emulator.py + +[testenv:datastore-emulator] +commands = + {[emulator]emulatorcmd} --package=datastore +setenv = {[emulator]setenv} +passenv = {[emulator]passenv} +deps = {[emulator]deps} + +[testenv:pubsub-emulator] +commands = + {[emulator]emulatorcmd} --package=pubsub +setenv = {[emulator]setenv} +passenv = {[emulator]passenv} +deps = {[emulator]deps} + +[testenv:bigtable-emulator] +commands = + {[emulator]emulatorcmd} --package=bigtable +setenv = {[emulator]setenv} +passenv = {[emulator]passenv} +deps = {[emulator]deps} From a0204def96f11037481f2a10d5ec0fbfb0998f99 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:27:17 -0700 Subject: [PATCH 008/468] Removing custom generated long-running operations. --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 97b695e1a71e..9c0204d20b20 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ 'httplib2 >= 0.9.1', - 'googleapis-common-protos', + 'googleapis-common-protos >= 1.3.4', 'oauth2client >= 2.0.1, < 3.0.0dev', 'protobuf >= 3.0.0', 'six', From 0234d9c43b9e3c89b2548152077251a3ce199edb Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 10:35:47 -0700 Subject: [PATCH 009/468] Making prints Python 3 friendly. --- packages/google-cloud-core/google/cloud/iterator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index eecaa5759e80..3581fed2601c 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -39,7 +39,7 @@ def get_items_from_response(self, response): requests):: >>> for item in MyIterator(...): - >>> print item.name + >>> print(item.name) >>> if not item.is_valid: >>> break """ From cd75f22325b833be61c16cde556a2f16b0beb77e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:53:24 -0700 Subject: [PATCH 010/468] Preparing for a release of all packages. Towards #2441. - Updating umbrella README to point at all packages - Putting upper bounds on grpcio in dependencies - Putting lower bounds on all google-cloud-* packages listed as dependencies - Adding `setup.cfg` for universal wheels --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 9c0204d20b20..3926ddbd7d4c 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-core', - version='0.20.0dev', + version='0.20.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 18e19fbfac08db65e5694db6ee7747969f643988 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:57:43 -0700 Subject: [PATCH 011/468] Adding setup.cfg to all packages. 
---
 packages/google-cloud-core/setup.cfg | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 packages/google-cloud-core/setup.cfg

diff --git a/packages/google-cloud-core/setup.cfg b/packages/google-cloud-core/setup.cfg
new file mode 100644
index 000000000000..2a9acf13daa9
--- /dev/null
+++ b/packages/google-cloud-core/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal = 1

From 55ce065dc395eb9640b8b2b2b3829ab953611204 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Tue, 27 Sep 2016 17:34:27 -0700
Subject: [PATCH 012/468] Upgrading version of oauth2client due to GAPIC/GAX conflict.

---
 packages/google-cloud-core/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py
index 3926ddbd7d4c..b59d4f5d2b3c 100644
--- a/packages/google-cloud-core/setup.py
+++ b/packages/google-cloud-core/setup.py
@@ -52,7 +52,7 @@
 REQUIREMENTS = [
     'httplib2 >= 0.9.1',
     'googleapis-common-protos >= 1.3.4',
-    'oauth2client >= 2.0.1, < 3.0.0dev',
+    'oauth2client >= 3.0.0, < 4.0.0dev',
     'protobuf >= 3.0.0',
     'six',
 ]

From 92ffd1f55fed3af49689e316bd6a80e40df9edcc Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Mon, 26 Sep 2016 14:13:40 -0400
Subject: [PATCH 013/468] Disable pylint's 'ungrouped-imports' error.

We share the 'google' namespace with third-party packages. PEP 8 wants
'local' imports to be separated from 'third-party' imports, which is
more important than pylint's attempt to group them by name alone.
---
 packages/google-cloud-core/google/cloud/_helpers.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py
index 6e2cdc31ec96..64e2e7b9fc1e 100644
--- a/packages/google-cloud-core/google/cloud/_helpers.py
+++ b/packages/google-cloud-core/google/cloud/_helpers.py
@@ -38,10 +38,8 @@
 from six.moves import http_client
 from six.moves import configparser

-# pylint: disable=ungrouped-imports
 from google.cloud.environment_vars import PROJECT
 from google.cloud.environment_vars import CREDENTIALS
-# pylint: enable=ungrouped-imports


 _NOW = datetime.datetime.utcnow  # To be replaced by tests.

From fec448b54ab71cd41673352ec563167d9ac32b8f Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Thu, 29 Sep 2016 16:17:02 -0700
Subject: [PATCH 014/468] General clean-up after rename.

- Removing "graft google" from MANIFEST for umbrella package. It isn't
  needed since the umbrella package has no source
- Updating license year on copy-pasted namespace package __init__.py
  files. Done via:
  https://gist.github.com/dhermes/a0e88f891ffffc3ecea5c9bb2f13e4f5
- Removing unused HTML context from docs/conf.py
- Setting GH_OWNER AND GH_PROJECT_NAME (which together make the
  REPO_SLUG) manually in the docs update scripts. This way the env.
  variables don't need to be set in the Travis UI / CLI.
Also updating tox.ini to stop passing those variables through - Removing the root package from `verify_included_modules.py` since it no longer has any source - Updated a docstring reference to a moved class in the Bigtable system test - Removing redundant `GOOGLE_CLOUD_*` in `tox` system test `passenv` (already covered by `GOOGLE_*`) --- packages/google-cloud-core/google/cloud/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/__init__.py b/packages/google-cloud-core/google/cloud/__init__.py index 8ac7b74af136..b2b833373882 100644 --- a/packages/google-cloud-core/google/cloud/__init__.py +++ b/packages/google-cloud-core/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From f57805fc120cb980c7d03641e57ef301b18c9785 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 3 Oct 2016 21:32:48 -0700 Subject: [PATCH 015/468] Updating package README's with more useful doc links. Also removing duplicate "Homepage" links (duplicate of "API Documentation" links). --- packages/google-cloud-core/README.rst | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index d8ec01b781be..28b8431795be 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -5,12 +5,9 @@ This library is not meant to stand-alone. Instead it defines common helpers (e.g. base ``Client`` and ``Connection`` classes) used by all of the ``google-cloud-*``. +- `Documentation`_ -- `Homepage`_ -- `API Documentation`_ - -.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ -.. _API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/google-cloud-api.html Quick Start ----------- From 75f9e5c2b4be41ef788252659eda24398be69bda Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 4 Oct 2016 01:17:30 -0700 Subject: [PATCH 016/468] Making max_results part of the base Iterator class. In the process, also making sure to lower maxResults on subsequent requests. Fixes #1467. --- .../google/cloud/iterator.py | 72 ++++++++++++++++--- .../unit_tests/test_iterator.py | 69 ++++++++++++++---- 2 files changed, 119 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 3581fed2601c..b7652e647767 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -45,40 +45,84 @@ def get_items_from_response(self, response): """ +import six + + class Iterator(object): """A generic class for iterating through Cloud JSON APIs list responses. :type client: :class:`google.cloud.client.Client` :param client: The client, which owns a connection to make requests. - :type path: string + :type path: str :param path: The path to query for the list of items. + :type page_token: str + :param page_token: (Optional) A token identifying a page in a result set. + + :type max_results: int + :param max_results: (Optional) The maximum number of results to fetch. + :type extra_params: dict or None :param extra_params: Extra query string parameters for the API call. 
""" PAGE_TOKEN = 'pageToken' - RESERVED_PARAMS = frozenset([PAGE_TOKEN]) + MAX_RESULTS = 'maxResults' + RESERVED_PARAMS = frozenset([PAGE_TOKEN, MAX_RESULTS]) - def __init__(self, client, path, extra_params=None): + def __init__(self, client, path, page_token=None, + max_results=None, extra_params=None): self.client = client self.path = path self.page_number = 0 - self.next_page_token = None + self.next_page_token = page_token + self.max_results = max_results + self.num_results = 0 self.extra_params = extra_params or {} reserved_in_use = self.RESERVED_PARAMS.intersection( self.extra_params) if reserved_in_use: raise ValueError(('Using a reserved parameter', reserved_in_use)) + self._curr_items = iter(()) def __iter__(self): - """Iterate through the list of items.""" - while self.has_next_page(): + """The :class:`Iterator` is an iterator.""" + return self + + def _update_items(self): + """Replace the current items iterator. + + Intended to be used when the current items iterator is exhausted. + + After replacing the iterator, consumes the first value to make sure + it is valid. + + :rtype: object + :returns: The first item in the next iterator. + :raises: :class:`~exceptions.StopIteration` if there is no next page. + """ + if self.has_next_page(): response = self.get_next_page_response() - for item in self.get_items_from_response(response): - yield item + items = self.get_items_from_response(response) + self._curr_items = iter(items) + return six.next(self._curr_items) + else: + raise StopIteration + + def next(self): + """Get the next value in the iterator.""" + try: + item = six.next(self._curr_items) + except StopIteration: + item = self._update_items() + + self.num_results += 1 + return item + + # Alias needed for Python 2/3 support. + __next__ = next def has_next_page(self): """Determines whether or not this iterator has more pages. @@ -89,6 +133,10 @@ def has_next_page(self): if self.page_number == 0: return True + if self.max_results is not None: + if self.num_results >= self.max_results: + return False + return self.next_page_token is not None def get_query_params(self): @@ -97,8 +145,11 @@ def get_query_params(self): :rtype: dict :returns: A dictionary of query parameters. """ - result = ({self.PAGE_TOKEN: self.next_page_token} - if self.next_page_token else {}) + result = {} + if self.next_page_token is not None: + result[self.PAGE_TOKEN] = self.next_page_token + if self.max_results is not None: + result[self.MAX_RESULTS] = self.max_results - self.num_results result.update(self.extra_params) return result @@ -123,6 +174,7 @@ def reset(self): """Resets the iterator to the beginning.""" self.page_number = 0 self.next_page_token = None + self.num_results = 0 def get_items_from_response(self, response): """Factory method called while iterating. This should be overridden. 
diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index ec823d9ccb22..44d02d30770e 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -34,7 +34,21 @@ def test_ctor(self): self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) + def test_constructor_w_extra_param_collision(self): + connection = _Connection() + client = _Client(connection) + PATH = '/foo' + extra_params = {'pageToken': 'val'} + self.assertRaises(ValueError, self._makeOne, client, PATH, + extra_params=extra_params) + def test___iter__(self): + iterator = self._makeOne(None, None) + self.assertIs(iter(iterator), iterator) + + def test_iterate(self): + import six + PATH = '/foo' KEY1 = 'key1' KEY2 = 'key2' @@ -42,13 +56,27 @@ def test___iter__(self): ITEMS = {KEY1: ITEM1, KEY2: ITEM2} def _get_items(response): - for item in response.get('items', []): - yield ITEMS[item['name']] - connection = _Connection({'items': [{'name': KEY1}, {'name': KEY2}]}) + return [ITEMS[item['name']] + for item in response.get('items', [])] + + connection = _Connection( + {'items': [{'name': KEY1}, {'name': KEY2}]}) client = _Client(connection) iterator = self._makeOne(client, PATH) iterator.get_items_from_response = _get_items - self.assertEqual(list(iterator), [ITEM1, ITEM2]) + self.assertEqual(iterator.num_results, 0) + + val1 = six.next(iterator) + self.assertEqual(val1, ITEM1) + self.assertEqual(iterator.num_results, 1) + + val2 = six.next(iterator) + self.assertEqual(val2, ITEM2) + self.assertEqual(iterator.num_results, 2) + + with self.assertRaises(StopIteration): + six.next(iterator) + kw, = connection._requested self.assertEqual(kw['method'], 'GET') self.assertEqual(kw['path'], PATH) @@ -79,6 +107,19 @@ def test_has_next_page_w_number_w_token(self): iterator.next_page_token = TOKEN self.assertTrue(iterator.has_next_page()) + def test_has_next_page_w_max_results_not_done(self): + iterator = self._makeOne(None, None, max_results=3, + page_token='definitely-not-none') + iterator.page_number = 1 + self.assertLess(iterator.num_results, iterator.max_results) + self.assertTrue(iterator.has_next_page()) + + def test_has_next_page_w_max_results_done(self): + iterator = self._makeOne(None, None, max_results=3) + iterator.page_number = 1 + iterator.num_results = iterator.max_results + self.assertFalse(iterator.has_next_page()) + def test_get_query_params_no_token(self): connection = _Connection() client = _Client(connection) @@ -96,6 +137,18 @@ def test_get_query_params_w_token(self): self.assertEqual(iterator.get_query_params(), {'pageToken': TOKEN}) + def test_get_query_params_w_max_results(self): + connection = _Connection() + client = _Client(connection) + path = '/foo' + max_results = 3 + iterator = self._makeOne(client, path, + max_results=max_results) + iterator.num_results = 1 + local_max = max_results - iterator.num_results + self.assertEqual(iterator.get_query_params(), + {'maxResults': local_max}) + def test_get_query_params_extra_params(self): connection = _Connection() client = _Client(connection) @@ -117,14 +170,6 @@ def test_get_query_params_w_token_and_extra_params(self): expected_query.update({'pageToken': TOKEN}) self.assertEqual(iterator.get_query_params(), expected_query) - def test_get_query_params_w_token_collision(self): - connection = _Connection() - client = _Client(connection) - PATH = '/foo' - extra_params = {'pageToken': 'val'} - 
self.assertRaises(ValueError, self._makeOne, client, PATH, - extra_params=extra_params) - def test_get_next_page_response_new_no_token_in_response(self): PATH = '/foo' TOKEN = 'token' From b201b5bc770ee3c11b1c9744a05731629b16ba48 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Oct 2016 16:59:04 -0400 Subject: [PATCH 017/468] Allow passing extra headers to 'Connection.api_request'. Needed to support oddball out-of-band encryption headers for 'Blob.rewrite'. --- .../google/cloud/connection.py | 7 +++-- .../unit_tests/test_connection.py | 31 +++++++++++++++++++ 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index e5893a34630e..45d7aa17388c 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -273,7 +273,7 @@ def _do_request(self, method, url, headers, data, body=data) def api_request(self, method, path, query_params=None, - data=None, content_type=None, + data=None, content_type=None, headers=None, api_base_url=None, api_version=None, expect_json=True, _target_object=None): """Make a request over the HTTP transport to the API. @@ -303,6 +303,9 @@ def api_request(self, method, path, query_params=None, :param content_type: The proper MIME type of the data provided. Default is None. + :type headers: dist + :param headers: extra HTTP headers to be sent with the request. + :type api_base_url: string :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. @@ -343,7 +346,7 @@ def api_request(self, method, path, query_params=None, response, content = self._make_request( method=method, url=url, data=data, content_type=content_type, - target_object=_target_object) + headers=headers, target_object=_target_object) if not 200 <= response.status < 300: raise make_exception(response, content, diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test_connection.py index af3802b21a9f..e2a02e83bdf7 100644 --- a/packages/google-cloud-core/unit_tests/test_connection.py +++ b/packages/google-cloud-core/unit_tests/test_connection.py @@ -318,6 +318,37 @@ def test_api_request_w_query_params(self): } self.assertEqual(http._called_with['headers'], expected_headers) + def test_api_request_w_headers(self): + from six.moves.urllib.parse import urlsplit + conn = self._makeMockOne() + http = conn._http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + self.assertEqual( + conn.api_request('GET', '/', headers={'X-Foo': 'bar'}), {}) + self.assertEqual(http._called_with['method'], 'GET') + uri = http._called_with['uri'] + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + # Intended to emulate self.mock_template + PATH = '/'.join([ + '', + 'mock', + conn.API_VERSION, + '', + ]) + self.assertEqual(path, PATH) + self.assertEqual(qs, '') + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'User-Agent': conn.USER_AGENT, + 'X-Foo': 'bar', + } + self.assertEqual(http._called_with['headers'], expected_headers) + def test_api_request_w_data(self): import json DATA = {'foo': 'bar'} From d1444fceee8551dbe96d4ee44c776345c11d67ef Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Oct 2016 17:10:54 -0400 Subject: [PATCH 018/468] Typo fix. 
[ci skip] --- packages/google-cloud-core/google/cloud/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index 45d7aa17388c..eca7be400bfe 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -303,7 +303,7 @@ def api_request(self, method, path, query_params=None, :param content_type: The proper MIME type of the data provided. Default is None. - :type headers: dist + :type headers: dict :param headers: extra HTTP headers to be sent with the request. :type api_base_url: string From 383d842efbc797ca517db0830c9bb2fd91073d57 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 21:11:19 -0700 Subject: [PATCH 019/468] Implementing basic Page class to help with Iterator. Intended to hold and slice up state that has already been retrieved from the server. --- .../google/cloud/iterator.py | 61 +++++++++++++++++- .../unit_tests/test_iterator.py | 63 +++++++++++++++++++ 2 files changed, 121 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index b7652e647767..b9d2b3112bca 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -39,15 +39,70 @@ def get_items_from_response(self, response): requests):: >>> for item in MyIterator(...): - >>> print(item.name) - >>> if not item.is_valid: - >>> break + ... print(item.name) + ... if not item.is_valid: + ... break """ import six +class Page(object): + """Single page of results in an iterator. + + :type parent: :class:`Iterator` + :param parent: The iterator that owns the current page. + """ + + def __init__(self, parent): + self._parent = parent + self._num_items = 0 + self._remaining = 0 + + @property + def num_items(self): + """Total items in the page. + + :rtype: int + :returns: The number of items in this page of items. + """ + return self._num_items + + @property + def remaining(self): + """Remaining items in the page. + + :rtype: int + :returns: The number of items remaining this page. + """ + return self._remaining + + def __iter__(self): + """The :class:`Page` is an iterator.""" + return self + + def _next_item(self): + """Get the next item in the page. + + This method (along with the constructor) is the workhorse + of this class. Subclasses will need to implement this method. + + It is separate from :meth:`next` since that method needs + to be aliased as ``__next__`` in Python 3. + + :raises NotImplementedError: Always + """ + raise NotImplementedError + + def next(self): + """Get the next value in the iterator.""" + return self._next_item() + + # Alias needed for Python 2/3 support. + __next__ = next + + class Iterator(object): """A generic class for iterating through Cloud JSON APIs list responses. 
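
The contract that the new ``Page`` base class sets up can be summarized with a rough,
self-contained sketch. The classes below are stand-ins written for illustration, not
imports from the module above: subclasses supply ``_next_item`` and inherit the usual
iterator protocol, with the ``__next__ = next`` alias keeping a single implementation
working on both Python 2 and Python 3::

    class PageSketch(object):  # stand-in mirroring the Page base class
        def __init__(self, parent):
            self._parent = parent

        def __iter__(self):  # a page is its own iterator
            return self

        def _next_item(self):  # hook that concrete pages override
            raise NotImplementedError

        def next(self):  # Python 2 entry point
            return self._next_item()

        __next__ = next  # Python 3 entry point


    class ListPage(PageSketch):
        """Toy page that serves items from an in-memory list."""

        def __init__(self, parent, items):
            super(ListPage, self).__init__(parent)
            self._items = iter(items)

        def _next_item(self):
            return next(self._items)  # StopIteration ends the page


    assert list(ListPage(None, ['a', 'b'])) == ['a', 'b']

Later commits in this series build on exactly this skeleton, moving response parsing
and the ``remaining`` bookkeeping into ``Page`` so that ``Iterator`` subclasses no
longer override ``get_items_from_response``.
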
diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 44d02d30770e..6012ad6f8f1c 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -15,6 +15,69 @@ import unittest +class TestPage(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.iterator import Page + return Page + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_constructor(self): + parent = object() + page = self._makeOne(parent) + self.assertIs(page._parent, parent) + self.assertEqual(page._num_items, 0) + self.assertEqual(page._remaining, 0) + + def test_num_items_property(self): + page = self._makeOne(None) + num_items = 42 + page._num_items = num_items + self.assertEqual(page.num_items, num_items) + + def test_remaining_property(self): + page = self._makeOne(None) + remaining = 1337 + page._remaining = remaining + self.assertEqual(page.remaining, remaining) + + def test___iter__(self): + page = self._makeOne(None) + self.assertIs(iter(page), page) + + def test__next_item_virtual(self): + page = self._makeOne(None) + with self.assertRaises(NotImplementedError): + page._next_item() + + def test_iterator_calls__next_item(self): + import six + + klass = self._getTargetClass() + + class CountItPage(klass): + + calls = 0 + values = None + + def _next_item(self): + self.calls += 1 + return self.values.pop(0) + + page = CountItPage(None) + page.values = [10, 11, 12] + + self.assertEqual(page.calls, 0) + self.assertEqual(six.next(page), 10) + self.assertEqual(page.calls, 1) + self.assertEqual(six.next(page), 11) + self.assertEqual(page.calls, 2) + self.assertEqual(six.next(page), 12) + self.assertEqual(page.calls, 3) + + class TestIterator(unittest.TestCase): def _getTargetClass(self): From 909c399970c5e53a684d2810be97287f73556d41 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 21:42:51 -0700 Subject: [PATCH 020/468] Collapsing get_items_from_response behavior into Page subclasses. --- packages/google-cloud-core/google/cloud/iterator.py | 6 +++++- packages/google-cloud-core/unit_tests/test_iterator.py | 5 +++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index b9d2b3112bca..d9cd4f024734 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -97,7 +97,11 @@ def _next_item(self): def next(self): """Get the next value in the iterator.""" - return self._next_item() + result = self._next_item() + # Since we've successfully got the next value from the + # iterator, we update the number of remaining. + self._remaining -= 1 + return result # Alias needed for Python 2/3 support. 
__next__ = next diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 6012ad6f8f1c..6bbc85c17683 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -67,15 +67,20 @@ def _next_item(self): return self.values.pop(0) page = CountItPage(None) + page._remaining = 100 page.values = [10, 11, 12] self.assertEqual(page.calls, 0) + self.assertEqual(page.remaining, 100) self.assertEqual(six.next(page), 10) self.assertEqual(page.calls, 1) + self.assertEqual(page.remaining, 99) self.assertEqual(six.next(page), 11) self.assertEqual(page.calls, 2) + self.assertEqual(page.remaining, 98) self.assertEqual(six.next(page), 12) self.assertEqual(page.calls, 3) + self.assertEqual(page.remaining, 97) class TestIterator(unittest.TestCase): From 5a35d46cad005de0aacb89243182730285ccc25f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 21:56:34 -0700 Subject: [PATCH 021/468] Collapsing Page __init__ functionality into base class. --- .../google-cloud-core/google/cloud/iterator.py | 13 ++++++++++--- .../unit_tests/test_iterator.py | 18 ++++++++++-------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index d9cd4f024734..ad46485822ec 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -53,12 +53,19 @@ class Page(object): :type parent: :class:`Iterator` :param parent: The iterator that owns the current page. + + :type response: dict + :param response: The JSON API response for a page. """ - def __init__(self, parent): + ITEMS_KEY = 'items' + + def __init__(self, parent, response): self._parent = parent - self._num_items = 0 - self._remaining = 0 + items = response.get(self.ITEMS_KEY, ()) + self._num_items = len(items) + self._remaining = self._num_items + self._item_iter = iter(items) @property def num_items(self): diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 6bbc85c17683..28dcf1a0f8c1 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -25,30 +25,32 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_constructor(self): + klass = self._getTargetClass() parent = object() - page = self._makeOne(parent) + response = {klass.ITEMS_KEY: (1, 2, 3)} + page = self._makeOne(parent, response) self.assertIs(page._parent, parent) - self.assertEqual(page._num_items, 0) - self.assertEqual(page._remaining, 0) + self.assertEqual(page._num_items, 3) + self.assertEqual(page._remaining, 3) def test_num_items_property(self): - page = self._makeOne(None) + page = self._makeOne(None, {}) num_items = 42 page._num_items = num_items self.assertEqual(page.num_items, num_items) def test_remaining_property(self): - page = self._makeOne(None) + page = self._makeOne(None, {}) remaining = 1337 page._remaining = remaining self.assertEqual(page.remaining, remaining) def test___iter__(self): - page = self._makeOne(None) + page = self._makeOne(None, {}) self.assertIs(iter(page), page) def test__next_item_virtual(self): - page = self._makeOne(None) + page = self._makeOne(None, {}) with self.assertRaises(NotImplementedError): page._next_item() @@ -66,7 +68,7 @@ def _next_item(self): self.calls 
+= 1 return self.values.pop(0) - page = CountItPage(None) + page = CountItPage(None, {}) page._remaining = 100 page.values = [10, 11, 12] From 08c2ce57855c0a166301817f307b20a1ae5715df Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 22:32:18 -0700 Subject: [PATCH 022/468] Replacing get_items_from_response() with a page class. --- .../google/cloud/iterator.py | 51 ++++++++----------- .../unit_tests/test_iterator.py | 19 +++---- 2 files changed, 29 insertions(+), 41 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index ad46485822ec..6151fbc57023 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -17,17 +17,23 @@ These iterators simplify the process of paging through API responses where the response is a list of results with a ``nextPageToken``. -To make an iterator work, just override the ``get_items_from_response`` -method so that given a response (containing a page of results) it parses -those results into an iterable of the actual objects you want:: +To make an iterator work, just override the ``PAGE_CLASS`` class +attribute so that given a response (containing a page of results) can +be parsed into an iterable page of the actual objects you want:: +those results into an iterable of the actual + + class MyPage(Page): + + def _next_item(self): + item = six.next(self._item_iter) + my_item = MyItemClass(other_arg=True) + my_item._set_properties(item) + return my_item + class MyIterator(Iterator): - def get_items_from_response(self, response): - items = response.get('items', []) - for item in items: - my_item = MyItemClass(other_arg=True) - my_item._set_properties(item) - yield my_item + + PAGE_CLASS = MyPage You then can use this to get **all** the results from a resource:: @@ -38,9 +44,9 @@ def get_items_from_response(self, response): you find what you're looking for (resulting in possibly fewer requests):: - >>> for item in MyIterator(...): - ... print(item.name) - ... if not item.is_valid: + >>> for my_item in MyIterator(...): + ... print(my_item.name) + ... if not my_item.is_valid: ... break """ @@ -117,6 +123,8 @@ def next(self): class Iterator(object): """A generic class for iterating through Cloud JSON APIs list responses. + Sub-classes need to over-write ``PAGE_CLASS``. + :type client: :class:`google.cloud.client.Client` :param client: The client, which owns a connection to make requests. @@ -136,6 +144,7 @@ class Iterator(object): PAGE_TOKEN = 'pageToken' MAX_RESULTS = 'maxResults' RESERVED_PARAMS = frozenset([PAGE_TOKEN, MAX_RESULTS]) + PAGE_CLASS = Page def __init__(self, client, path, page_token=None, max_results=None, extra_params=None): @@ -171,8 +180,7 @@ def _update_items(self): """ if self.has_next_page(): response = self.get_next_page_response() - items = self.get_items_from_response(response) - self._curr_items = iter(items) + self._curr_items = self.PAGE_CLASS(self, response) return six.next(self._curr_items) else: raise StopIteration @@ -242,21 +250,6 @@ def reset(self): self.next_page_token = None self.num_results = 0 - def get_items_from_response(self, response): - """Factory method called while iterating. This should be overridden. - - This method should be overridden by a subclass. It should - accept the API response of a request for the next page of items, - and return a list (or other iterable) of items. 
- - Typically this method will construct a Bucket or a Blob from the - page of results in the response. - - :type response: dict - :param response: The response of asking for the next page of items. - """ - raise NotImplementedError - class MethodIterator(object): """Method-based iterator iterating through Cloud JSON APIs list responses. diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 28dcf1a0f8c1..d71a1971bfc7 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -118,6 +118,7 @@ def test___iter__(self): def test_iterate(self): import six + from google.cloud.iterator import Page PATH = '/foo' KEY1 = 'key1' @@ -125,15 +126,17 @@ def test_iterate(self): ITEM1, ITEM2 = object(), object() ITEMS = {KEY1: ITEM1, KEY2: ITEM2} - def _get_items(response): - return [ITEMS[item['name']] - for item in response.get('items', [])] + class _Page(Page): + + def _next_item(self): + item = six.next(self._item_iter) + return ITEMS[item['name']] connection = _Connection( {'items': [{'name': KEY1}, {'name': KEY2}]}) client = _Client(connection) iterator = self._makeOne(client, PATH) - iterator.get_items_from_response = _get_items + iterator.PAGE_CLASS = _Page self.assertEqual(iterator.num_results, 0) val1 = six.next(iterator) @@ -278,14 +281,6 @@ def test_reset(self): self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) - def test_get_items_from_response_raises_NotImplementedError(self): - PATH = '/foo' - connection = _Connection() - client = _Client(connection) - iterator = self._makeOne(client, PATH) - self.assertRaises(NotImplementedError, - iterator.get_items_from_response, object()) - class TestMethodIterator(unittest.TestCase): From a7013b066bb8ecf00d090b275646eb0579640560 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 23:28:05 -0700 Subject: [PATCH 023/468] Making path an optional argument to Iterator. --- .../google/cloud/iterator.py | 13 +- .../unit_tests/test_iterator.py | 128 ++++++++++-------- 2 files changed, 79 insertions(+), 62 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 6151fbc57023..6799fa778ce6 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -128,9 +128,6 @@ class Iterator(object): :type client: :class:`google.cloud.client.Client` :param client: The client, which owns a connection to make requests. - :type path: str - :param path: The path to query for the list of items. - :type page_token: str :param page_token: (Optional) A token identifying a page in a result set. @@ -139,16 +136,22 @@ class Iterator(object): :type extra_params: dict or None :param extra_params: Extra query string parameters for the API call. + + :type path: str + :param path: The path to query for the list of items. 
""" PAGE_TOKEN = 'pageToken' MAX_RESULTS = 'maxResults' RESERVED_PARAMS = frozenset([PAGE_TOKEN, MAX_RESULTS]) PAGE_CLASS = Page + PATH = None - def __init__(self, client, path, page_token=None, - max_results=None, extra_params=None): + def __init__(self, client, page_token=None, max_results=None, + extra_params=None, path=None): self.client = client + if path is None: + path = self.PATH self.path = path self.page_number = 0 self.next_page_token = page_token diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index d71a1971bfc7..41d135df7f83 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -94,23 +94,37 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_ctor(self): + def test_constructor(self): + connection = _Connection() + client = _Client(connection) + path = '/foo' + iterator = self._makeOne(client, path=path) + self.assertIs(iterator.client, client) + self.assertEqual(iterator.path, path) + self.assertEqual(iterator.page_number, 0) + self.assertIsNone(iterator.next_page_token) + + def test_constructor_default_path(self): + klass = self._getTargetClass() + + class WithPath(klass): + PATH = '/path' + connection = _Connection() client = _Client(connection) - PATH = '/foo' - iterator = self._makeOne(client, PATH) + iterator = WithPath(client) self.assertIs(iterator.client, client) - self.assertEqual(iterator.path, PATH) + self.assertEqual(iterator.path, WithPath.PATH) self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) def test_constructor_w_extra_param_collision(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' + path = '/foo' extra_params = {'pageToken': 'val'} - self.assertRaises(ValueError, self._makeOne, client, PATH, - extra_params=extra_params) + with self.assertRaises(ValueError): + self._makeOne(client, path=path, extra_params=extra_params) def test___iter__(self): iterator = self._makeOne(None, None) @@ -120,11 +134,11 @@ def test_iterate(self): import six from google.cloud.iterator import Page - PATH = '/foo' - KEY1 = 'key1' - KEY2 = 'key2' - ITEM1, ITEM2 = object(), object() - ITEMS = {KEY1: ITEM1, KEY2: ITEM2} + path = '/foo' + key1 = 'key1' + key2 = 'key2' + item1, item2 = object(), object() + ITEMS = {key1: item1, key2: item2} class _Page(Page): @@ -133,18 +147,18 @@ def _next_item(self): return ITEMS[item['name']] connection = _Connection( - {'items': [{'name': KEY1}, {'name': KEY2}]}) + {'items': [{'name': key1}, {'name': key2}]}) client = _Client(connection) - iterator = self._makeOne(client, PATH) + iterator = self._makeOne(client, path=path) iterator.PAGE_CLASS = _Page self.assertEqual(iterator.num_results, 0) val1 = six.next(iterator) - self.assertEqual(val1, ITEM1) + self.assertEqual(val1, item1) self.assertEqual(iterator.num_results, 1) val2 = six.next(iterator) - self.assertEqual(val2, ITEM2) + self.assertEqual(val2, item2) self.assertEqual(iterator.num_results, 2) with self.assertRaises(StopIteration): @@ -152,36 +166,36 @@ def _next_item(self): kw, = connection._requested self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], PATH) + self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) def test_has_next_page_new(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - iterator = self._makeOne(client, PATH) + path = '/foo' + 
iterator = self._makeOne(client, path=path) self.assertTrue(iterator.has_next_page()) def test_has_next_page_w_number_no_token(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - iterator = self._makeOne(client, PATH) + path = '/foo' + iterator = self._makeOne(client, path=path) iterator.page_number = 1 self.assertFalse(iterator.has_next_page()) def test_has_next_page_w_number_w_token(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - TOKEN = 'token' - iterator = self._makeOne(client, PATH) + path = '/foo' + token = 'token' + iterator = self._makeOne(client, path=path) iterator.page_number = 1 - iterator.next_page_token = TOKEN + iterator.next_page_token = token self.assertTrue(iterator.has_next_page()) def test_has_next_page_w_max_results_not_done(self): - iterator = self._makeOne(None, None, max_results=3, + iterator = self._makeOne(None, path=None, max_results=3, page_token='definitely-not-none') iterator.page_number = 1 self.assertLess(iterator.num_results, iterator.max_results) @@ -196,26 +210,26 @@ def test_has_next_page_w_max_results_done(self): def test_get_query_params_no_token(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - iterator = self._makeOne(client, PATH) + path = '/foo' + iterator = self._makeOne(client, path=path) self.assertEqual(iterator.get_query_params(), {}) def test_get_query_params_w_token(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - TOKEN = 'token' - iterator = self._makeOne(client, PATH) - iterator.next_page_token = TOKEN + path = '/foo' + token = 'token' + iterator = self._makeOne(client, path=path) + iterator.next_page_token = token self.assertEqual(iterator.get_query_params(), - {'pageToken': TOKEN}) + {'pageToken': token}) def test_get_query_params_w_max_results(self): connection = _Connection() client = _Client(connection) path = '/foo' max_results = 3 - iterator = self._makeOne(client, path, + iterator = self._makeOne(client, path=path, max_results=max_results) iterator.num_results = 1 local_max = max_results - iterator.num_results @@ -225,58 +239,58 @@ def test_get_query_params_w_max_results(self): def test_get_query_params_extra_params(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' + path = '/foo' extra_params = {'key': 'val'} - iterator = self._makeOne(client, PATH, extra_params=extra_params) + iterator = self._makeOne(client, path=path, extra_params=extra_params) self.assertEqual(iterator.get_query_params(), extra_params) def test_get_query_params_w_token_and_extra_params(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - TOKEN = 'token' + path = '/foo' + token = 'token' extra_params = {'key': 'val'} - iterator = self._makeOne(client, PATH, extra_params=extra_params) - iterator.next_page_token = TOKEN + iterator = self._makeOne(client, path=path, extra_params=extra_params) + iterator.next_page_token = token expected_query = extra_params.copy() - expected_query.update({'pageToken': TOKEN}) + expected_query.update({'pageToken': token}) self.assertEqual(iterator.get_query_params(), expected_query) def test_get_next_page_response_new_no_token_in_response(self): - PATH = '/foo' - TOKEN = 'token' - KEY1 = 'key1' - KEY2 = 'key2' - connection = _Connection({'items': [{'name': KEY1}, {'name': KEY2}], - 'nextPageToken': TOKEN}) + path = '/foo' + token = 'token' + key1 = 'key1' + key2 = 'key2' + connection = _Connection({'items': [{'name': key1}, {'name': key2}], + 
'nextPageToken': token}) client = _Client(connection) - iterator = self._makeOne(client, PATH) + iterator = self._makeOne(client, path=path) response = iterator.get_next_page_response() - self.assertEqual(response['items'], [{'name': KEY1}, {'name': KEY2}]) + self.assertEqual(response['items'], [{'name': key1}, {'name': key2}]) self.assertEqual(iterator.page_number, 1) - self.assertEqual(iterator.next_page_token, TOKEN) + self.assertEqual(iterator.next_page_token, token) kw, = connection._requested self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], PATH) + self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) def test_get_next_page_response_no_token(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - iterator = self._makeOne(client, PATH) + path = '/foo' + iterator = self._makeOne(client, path=path) iterator.page_number = 1 self.assertRaises(RuntimeError, iterator.get_next_page_response) def test_reset(self): connection = _Connection() client = _Client(connection) - PATH = '/foo' - TOKEN = 'token' - iterator = self._makeOne(client, PATH) + path = '/foo' + token = 'token' + iterator = self._makeOne(client, path=path) iterator.page_number = 1 - iterator.next_page_token = TOKEN + iterator.next_page_token = token iterator.reset() self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) From 8f9951c03266ad4009c78d8caa6d7a8464ce5077 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Oct 2016 23:41:58 -0700 Subject: [PATCH 024/468] Swapping iterator's _next_item() for _item_to_value(). This is so that the owned items iterator management could be done on the base class and the child classes just need to worry about converting the JSON values to whatever type is required. --- .../google-cloud-core/google/cloud/iterator.py | 12 ++++++------ .../unit_tests/test_iterator.py | 17 ++++++++--------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 6799fa778ce6..57139e6fdfb1 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -24,8 +24,7 @@ class MyPage(Page): - def _next_item(self): - item = six.next(self._item_iter) + def _item_to_value(self, item): my_item = MyItemClass(other_arg=True) my_item._set_properties(item) return my_item @@ -95,14 +94,14 @@ def __iter__(self): """The :class:`Page` is an iterator.""" return self - def _next_item(self): + def _item_to_value(self, item): """Get the next item in the page. This method (along with the constructor) is the workhorse of this class. Subclasses will need to implement this method. - It is separate from :meth:`next` since that method needs - to be aliased as ``__next__`` in Python 3. + :type item: dict + :param item: An item to be converted to a native object. :raises NotImplementedError: Always """ @@ -110,7 +109,8 @@ def _next_item(self): def next(self): """Get the next value in the iterator.""" - result = self._next_item() + item = six.next(self._item_iter) + result = self._item_to_value(item) # Since we've successfully got the next value from the # iterator, we update the number of remaining. 
self._remaining -= 1 diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 41d135df7f83..78e333ab8917 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -49,12 +49,12 @@ def test___iter__(self): page = self._makeOne(None, {}) self.assertIs(iter(page), page) - def test__next_item_virtual(self): + def test__item_to_value(self): page = self._makeOne(None, {}) with self.assertRaises(NotImplementedError): - page._next_item() + page._item_to_value(None) - def test_iterator_calls__next_item(self): + def test_iterator_calls__item_to_value(self): import six klass = self._getTargetClass() @@ -64,13 +64,13 @@ class CountItPage(klass): calls = 0 values = None - def _next_item(self): + def _item_to_value(self, item): self.calls += 1 - return self.values.pop(0) + return item - page = CountItPage(None, {}) + response = {klass.ITEMS_KEY: [10, 11, 12]} + page = CountItPage(None, response) page._remaining = 100 - page.values = [10, 11, 12] self.assertEqual(page.calls, 0) self.assertEqual(page.remaining, 100) @@ -142,8 +142,7 @@ def test_iterate(self): class _Page(Page): - def _next_item(self): - item = six.next(self._item_iter) + def _item_to_value(self, item): return ITEMS[item['name']] connection = _Connection( From bc2b4588470d8f99a0e0ccb6735c0b1750888620 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 12 Oct 2016 00:18:09 -0700 Subject: [PATCH 025/468] Exposing the current page in iterator. --- .../google/cloud/iterator.py | 76 +++++++++++++------ 1 file changed, 53 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 57139e6fdfb1..eea5c5ca75f9 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -20,7 +20,6 @@ To make an iterator work, just override the ``PAGE_CLASS`` class attribute so that given a response (containing a page of results) can be parsed into an iterable page of the actual objects you want:: -those results into an iterable of the actual class MyPage(Page): @@ -47,6 +46,29 @@ class MyIterator(Iterator): ... print(my_item.name) ... if not my_item.is_valid: ... break + +When iterating, not every new item will send a request to the server. +To monitor these requests, track the current page of the iterator:: + + >>> iterator = MyIterator(...) + >>> iterator.page_number + 0 + >>> next(iterator) + + >>> iterator.page_number + 1 + >>> iterator.page.remaining + 1 + >>> next(iterator) + + >>> iterator.page.remaining + 0 + >>> next(iterator) + + >>> iterator.page_number + 2 + >>> iterator.page.remaining + 19 """ @@ -149,52 +171,60 @@ class Iterator(object): def __init__(self, client, page_token=None, max_results=None, extra_params=None, path=None): + self.extra_params = extra_params or {} + self._verify_params() self.client = client - if path is None: - path = self.PATH - self.path = path + self.path = path or self.PATH self.page_number = 0 self.next_page_token = page_token self.max_results = max_results self.num_results = 0 - self.extra_params = extra_params or {} + self._page = None + + def _verify_params(self): + """Verifies the parameters don't use any reserved parameter. + + :raises ValueError: If a reserved parameter is used. 
+ """ reserved_in_use = self.RESERVED_PARAMS.intersection( self.extra_params) if reserved_in_use: - raise ValueError(('Using a reserved parameter', - reserved_in_use)) - self._curr_items = iter(()) + raise ValueError('Using a reserved parameter', + reserved_in_use) + + @property + def page(self): + """The current page of results that has been retrieved. + + :rtype: :class:`Page` + :returns: The page of items that has been retrieved. + """ + return self._page def __iter__(self): """The :class:`Iterator` is an iterator.""" return self - def _update_items(self): - """Replace the current items iterator. - - Intended to be used when the current items iterator is exhausted. + def _update_page(self): + """Replace the current page. - After replacing the iterator, consumes the first value to make sure - it is valid. + Does nothing if the current page is non-null and has items + remaining. - :rtype: object - :returns: The first item in the next iterator. :raises: :class:`~exceptions.StopIteration` if there is no next page. """ + if self.page is not None and self.page.remaining > 0: + return if self.has_next_page(): response = self.get_next_page_response() - self._curr_items = self.PAGE_CLASS(self, response) - return six.next(self._curr_items) + self._page = self.PAGE_CLASS(self, response) else: raise StopIteration def next(self): """Get the next value in the iterator.""" - try: - item = six.next(self._curr_items) - except StopIteration: - item = self._update_items() - + self._update_page() + item = six.next(self.page) self.num_results += 1 return item From eb089fa152129948abe9b33f400b5cd150dd70d1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 12 Oct 2016 00:24:01 -0700 Subject: [PATCH 026/468] Removing RuntimeError from iterator's get_next_page_response(). This state can never happen since a StopIteration will occur before the method would ever be called without a token. --- packages/google-cloud-core/google/cloud/iterator.py | 3 --- packages/google-cloud-core/unit_tests/test_iterator.py | 8 -------- 2 files changed, 11 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index eea5c5ca75f9..37a9e3388506 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -266,9 +266,6 @@ def get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. """ - if not self.has_next_page(): - raise RuntimeError('No more pages. 
Try resetting the iterator.') - response = self.client.connection.api_request( method='GET', path=self.path, query_params=self.get_query_params()) diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 78e333ab8917..4cbba78c9a35 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -274,14 +274,6 @@ def test_get_next_page_response_new_no_token_in_response(self): self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) - def test_get_next_page_response_no_token(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - iterator = self._makeOne(client, path=path) - iterator.page_number = 1 - self.assertRaises(RuntimeError, iterator.get_next_page_response) - def test_reset(self): connection = _Connection() client = _Client(connection) From fd86b0397bb3fe9b06196ebe106604277ddabac0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 12 Oct 2016 00:28:00 -0700 Subject: [PATCH 027/468] Adding a page reset to iterator's reset. Also doing a tiny re-org in constructor to separate the attributes which change and those which don't. --- packages/google-cloud-core/google/cloud/iterator.py | 4 +++- packages/google-cloud-core/unit_tests/test_iterator.py | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 37a9e3388506..23ae06f89458 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -173,11 +173,12 @@ def __init__(self, client, page_token=None, max_results=None, extra_params=None, path=None): self.extra_params = extra_params or {} self._verify_params() + self.max_results = max_results self.client = client self.path = path or self.PATH + # The attributes below will change over the life of the iterator. self.page_number = 0 self.next_page_token = page_token - self.max_results = max_results self.num_results = 0 self._page = None @@ -279,6 +280,7 @@ def reset(self): self.page_number = 0 self.next_page_token = None self.num_results = 0 + self._page = None class MethodIterator(object): diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 4cbba78c9a35..4f609091d908 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -282,9 +282,12 @@ def test_reset(self): iterator = self._makeOne(client, path=path) iterator.page_number = 1 iterator.next_page_token = token + iterator._page = object() iterator.reset() self.assertEqual(iterator.page_number, 0) + self.assertEqual(iterator.num_results, 0) self.assertIsNone(iterator.next_page_token) + self.assertIsNone(iterator.page) class TestMethodIterator(unittest.TestCase): From f472c67142ce898146f331b93a44cb5b0687bb7f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 12 Oct 2016 00:49:45 -0700 Subject: [PATCH 028/468] Removing unused MethodIterator. 
--- .../google/cloud/iterator.py | 44 ---------- .../unit_tests/test_iterator.py | 88 ------------------- 2 files changed, 132 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 23ae06f89458..ca97af95a3e7 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -281,47 +281,3 @@ def reset(self): self.next_page_token = None self.num_results = 0 self._page = None - - -class MethodIterator(object): - """Method-based iterator iterating through Cloud JSON APIs list responses. - - :type method: instance method - :param method: ``list_foo`` method of a domain object, taking as arguments - ``page_token``, ``page_size``, and optional additional - keyword arguments. - - :type page_token: string or ``NoneType`` - :param page_token: Initial page token to pass. if ``None``, fetch the - first page from the ``method`` API call. - - :type page_size: integer or ``NoneType`` - :param page_size: Maximum number of items to return from the ``method`` - API call; if ``None``, uses the default for the API. - - :type max_calls: integer or ``NoneType`` - :param max_calls: Maximum number of times to make the ``method`` - API call; if ``None``, applies no limit. - - :type kw: dict - :param kw: optional keyword arguments to be passed to ``method``. - """ - def __init__(self, method, page_token=None, page_size=None, - max_calls=None, **kw): - self._method = method - self._token = page_token - self._page_size = page_size - self._kw = kw - self._max_calls = max_calls - self._page_num = 0 - - def __iter__(self): - while self._max_calls is None or self._page_num < self._max_calls: - items, new_token = self._method( - page_token=self._token, page_size=self._page_size, **self._kw) - for item in items: - yield item - if new_token is None: - return - self._page_num += 1 - self._token = new_token diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 4f609091d908..738ccffb7af5 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -290,94 +290,6 @@ def test_reset(self): self.assertIsNone(iterator.page) -class TestMethodIterator(unittest.TestCase): - - def _getTargetClass(self): - from google.cloud.iterator import MethodIterator - return MethodIterator - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def test_ctor_defaults(self): - wlm = _WithListMethod() - iterator = self._makeOne(wlm.list_foo) - self.assertEqual(iterator._method, wlm.list_foo) - self.assertIsNone(iterator._token) - self.assertIsNone(iterator._page_size) - self.assertEqual(iterator._kw, {}) - self.assertIsNone(iterator._max_calls) - self.assertEqual(iterator._page_num, 0) - - def test_ctor_explicit(self): - wlm = _WithListMethod() - TOKEN = wlm._letters - SIZE = 4 - CALLS = 2 - iterator = self._makeOne(wlm.list_foo, TOKEN, SIZE, CALLS, - foo_type='Bar') - self.assertEqual(iterator._method, wlm.list_foo) - self.assertEqual(iterator._token, TOKEN) - self.assertEqual(iterator._page_size, SIZE) - self.assertEqual(iterator._kw, {'foo_type': 'Bar'}) - self.assertEqual(iterator._max_calls, CALLS) - self.assertEqual(iterator._page_num, 0) - - def test___iter___defaults(self): - import string - wlm = _WithListMethod() - iterator = self._makeOne(wlm.list_foo) - found = [] - for char in iterator: - found.append(char) - 
self.assertEqual(found, list(string.printable)) - self.assertEqual(len(wlm._called_with), len(found) // 10) - for i, (token, size, kw) in enumerate(wlm._called_with): - if i == 0: - self.assertIsNone(token) - else: - self.assertEqual(token, string.printable[i * 10:]) - self.assertIsNone(size) - self.assertEqual(kw, {}) - - def test___iter___explicit_size_and_maxcalls_and_kw(self): - import string - wlm = _WithListMethod() - iterator = self._makeOne(wlm.list_foo, page_size=2, max_calls=3, - foo_type='Bar') - found = [] - for char in iterator: - found.append(char) - self.assertEqual(found, list(string.printable[:2 * 3])) - self.assertEqual(len(wlm._called_with), len(found) // 2) - for i, (token, size, kw) in enumerate(wlm._called_with): - if i == 0: - self.assertIsNone(token) - else: - self.assertEqual(token, string.printable[i * 2:]) - self.assertEqual(size, 2) - self.assertEqual(kw, {'foo_type': 'Bar'}) - - -class _WithListMethod(object): - - def __init__(self): - import string - self._called_with = [] - self._letters = string.printable - - def list_foo(self, page_token, page_size, **kw): - if page_token is not None: - assert page_token == self._letters - self._called_with.append((page_token, page_size, kw)) - if page_size is None: - page_size = 10 - page, self._letters = ( - self._letters[:page_size], self._letters[page_size:]) - token = self._letters or None - return page, token - - class _Connection(object): def __init__(self, *responses): From cb4efe6e591f98c9b7e08ab8c51bd31e9d566cba Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 12 Oct 2016 00:54:57 -0700 Subject: [PATCH 029/468] Implementation hiding in Iterator class. Making JSON API call helpers non-public. This reduces the interface to methods relevant to iterating. --- .../google/cloud/iterator.py | 9 +++---- .../unit_tests/test_iterator.py | 24 +++++++++---------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index ca97af95a3e7..3721f257151f 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -217,7 +217,7 @@ def _update_page(self): if self.page is not None and self.page.remaining > 0: return if self.has_next_page(): - response = self.get_next_page_response() + response = self._get_next_page_response() self._page = self.PAGE_CLASS(self, response) else: raise StopIteration @@ -247,7 +247,7 @@ def has_next_page(self): return self.next_page_token is not None - def get_query_params(self): + def _get_query_params(self): """Getter for query parameters for the next request. :rtype: dict @@ -261,14 +261,15 @@ def get_query_params(self): result.update(self.extra_params) return result - def get_next_page_response(self): + def _get_next_page_response(self): """Requests the next page from the path provided. :rtype: dict :returns: The parsed JSON response of the next page's contents. 
""" response = self.client.connection.api_request( - method='GET', path=self.path, query_params=self.get_query_params()) + method='GET', path=self.path, + query_params=self._get_query_params()) self.page_number += 1 self.next_page_token = response.get('nextPageToken') diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 738ccffb7af5..7f0a80b4e335 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -206,24 +206,24 @@ def test_has_next_page_w_max_results_done(self): iterator.num_results = iterator.max_results self.assertFalse(iterator.has_next_page()) - def test_get_query_params_no_token(self): + def test__get_query_params_no_token(self): connection = _Connection() client = _Client(connection) path = '/foo' iterator = self._makeOne(client, path=path) - self.assertEqual(iterator.get_query_params(), {}) + self.assertEqual(iterator._get_query_params(), {}) - def test_get_query_params_w_token(self): + def test__get_query_params_w_token(self): connection = _Connection() client = _Client(connection) path = '/foo' token = 'token' iterator = self._makeOne(client, path=path) iterator.next_page_token = token - self.assertEqual(iterator.get_query_params(), + self.assertEqual(iterator._get_query_params(), {'pageToken': token}) - def test_get_query_params_w_max_results(self): + def test__get_query_params_w_max_results(self): connection = _Connection() client = _Client(connection) path = '/foo' @@ -232,18 +232,18 @@ def test_get_query_params_w_max_results(self): max_results=max_results) iterator.num_results = 1 local_max = max_results - iterator.num_results - self.assertEqual(iterator.get_query_params(), + self.assertEqual(iterator._get_query_params(), {'maxResults': local_max}) - def test_get_query_params_extra_params(self): + def test__get_query_params_extra_params(self): connection = _Connection() client = _Client(connection) path = '/foo' extra_params = {'key': 'val'} iterator = self._makeOne(client, path=path, extra_params=extra_params) - self.assertEqual(iterator.get_query_params(), extra_params) + self.assertEqual(iterator._get_query_params(), extra_params) - def test_get_query_params_w_token_and_extra_params(self): + def test__get_query_params_w_token_and_extra_params(self): connection = _Connection() client = _Client(connection) path = '/foo' @@ -254,9 +254,9 @@ def test_get_query_params_w_token_and_extra_params(self): expected_query = extra_params.copy() expected_query.update({'pageToken': token}) - self.assertEqual(iterator.get_query_params(), expected_query) + self.assertEqual(iterator._get_query_params(), expected_query) - def test_get_next_page_response_new_no_token_in_response(self): + def test__get_next_page_response_new_no_token_in_response(self): path = '/foo' token = 'token' key1 = 'key1' @@ -265,7 +265,7 @@ def test_get_next_page_response_new_no_token_in_response(self): 'nextPageToken': token}) client = _Client(connection) iterator = self._makeOne(client, path=path) - response = iterator.get_next_page_response() + response = iterator._get_next_page_response() self.assertEqual(response['items'], [{'name': key1}, {'name': key2}]) self.assertEqual(iterator.page_number, 1) self.assertEqual(iterator.next_page_token, token) From 62d35996eb5b4cfa484ab39369eb7198ed9c0ae7 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 12 Oct 2016 15:58:38 -0400 Subject: [PATCH 030/468] Return None if project key is missing. 
Fixes #2536 --- .../google/cloud/_helpers.py | 5 ++++- .../unit_tests/test__helpers.py | 22 +++++++++++++++++-- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 64e2e7b9fc1e..0b515cc5ce9d 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -240,7 +240,10 @@ def _default_service_project_id(): config.read(search_paths) if config.has_section(_GCLOUD_CONFIG_SECTION): - return config.get(_GCLOUD_CONFIG_SECTION, _GCLOUD_CONFIG_KEY) + try: + return config.get(_GCLOUD_CONFIG_SECTION, _GCLOUD_CONFIG_KEY) + except configparser.NoOptionError: + return None def _compute_engine_id(): diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 4dd512ee9701..b1863b8ce5e0 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -254,6 +254,26 @@ def mock_get_path(): self.assertEqual(result, project_id) + def test_nix_missing_prject_key(self): + from google.cloud import _helpers as MUT + from google.cloud._testing import _Monkey + from google.cloud._testing import _NamedTemporaryFile + + with _NamedTemporaryFile() as temp: + config_value = '[%s]' % (MUT._GCLOUD_CONFIG_SECTION,) + with open(temp.name, 'w') as config_file: + config_file.write(config_value) + + def mock_get_path(): + return temp.name + + with _Monkey(os, name='not-nt'): + with _Monkey(MUT, _get_nix_config_path=mock_get_path, + _USER_ROOT='not-None'): + result = self._callFUT() + + self.assertEqual(result, None) + def test_windows(self): from google.cloud import _helpers as MUT from google.cloud._testing import _Monkey @@ -694,7 +714,6 @@ def test_w_utc_datetime(self): def test_w_non_utc_datetime(self): import datetime - from google.cloud._helpers import _UTC zone = self._make_timezone(offset=datetime.timedelta(hours=-1)) TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone) @@ -703,7 +722,6 @@ def test_w_non_utc_datetime(self): def test_w_non_utc_datetime_and_ignore_zone(self): import datetime - from google.cloud._helpers import _UTC zone = self._make_timezone(offset=datetime.timedelta(hours=-1)) TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone) From 9e2fbee0e405c3b15fa0d37ee44584c53447fa5a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 14 Oct 2016 14:11:53 -0700 Subject: [PATCH 031/468] Moving backend specific behavior from Page to Iterator. This is to lower the burden on implementers. The previous approach (requiring a Page and Iterator subclass) ended up causing lots of copy-pasta docstrings that were just a distraction. Follow up to #2531. --- .../google/cloud/iterator.py | 102 +++++++++++------- .../unit_tests/test_iterator.py | 50 ++++----- 2 files changed, 90 insertions(+), 62 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 3721f257151f..46107753c251 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -17,22 +17,19 @@ These iterators simplify the process of paging through API responses where the response is a list of results with a ``nextPageToken``. 
-To make an iterator work, just override the ``PAGE_CLASS`` class -attribute so that given a response (containing a page of results) can -be parsed into an iterable page of the actual objects you want:: +To make an iterator work, you may need to override the +``ITEMS_KEY`` class attribute so that given a response (containing a page of +results) can be parsed into an iterable page of the actual objects you want:: - class MyPage(Page): + class MyIterator(Iterator): + + ITEMS_KEY = 'blocks' def _item_to_value(self, item): my_item = MyItemClass(other_arg=True) my_item._set_properties(item) return my_item - - class MyIterator(Iterator): - - PAGE_CLASS = MyPage - You then can use this to get **all** the results from a resource:: >>> iterator = MyIterator(...) @@ -69,6 +66,30 @@ class MyIterator(Iterator): 2 >>> iterator.page.remaining 19 + +It's also possible to consume an entire page and handle the paging process +manually:: + + >>> iterator = MyIterator(...) + >>> items = list(iterator.page) + >>> items + [ + , + , + , + ] + >>> iterator.page.remaining + 0 + >>> iterator.page.num_items + 3 + >>> iterator.next_page_token + 'eav1OzQB0OM8rLdGXOEsyQWSG' + >>> # And just do the same thing to consume the next page. + >>> list(iterator.page) + [ + , + , + ] """ @@ -83,16 +104,19 @@ class Page(object): :type response: dict :param response: The JSON API response for a page. - """ - ITEMS_KEY = 'items' + :type items_key: str + :param items_key: The dictionary key used to retrieve items + from the response. + """ - def __init__(self, parent, response): + def __init__(self, parent, response, items_key): self._parent = parent - items = response.get(self.ITEMS_KEY, ()) + items = response.get(items_key, ()) self._num_items = len(items) self._remaining = self._num_items self._item_iter = iter(items) + self.response = response @property def num_items(self): @@ -116,23 +140,10 @@ def __iter__(self): """The :class:`Page` is an iterator.""" return self - def _item_to_value(self, item): - """Get the next item in the page. - - This method (along with the constructor) is the workhorse - of this class. Subclasses will need to implement this method. - - :type item: dict - :param item: An item to be converted to a native object. - - :raises NotImplementedError: Always - """ - raise NotImplementedError - def next(self): """Get the next value in the iterator.""" item = six.next(self._item_iter) - result = self._item_to_value(item) + result = self._parent._item_to_value(item) # Since we've successfully got the next value from the # iterator, we update the number of remaining. self._remaining -= 1 @@ -145,7 +156,8 @@ def next(self): class Iterator(object): """A generic class for iterating through Cloud JSON APIs list responses. - Sub-classes need to over-write ``PAGE_CLASS``. + Sub-classes need to over-write :attr:`ITEMS_KEY` and to define + :meth:`_item_to_value`. :type client: :class:`google.cloud.client.Client` :param client: The client, which owns a connection to make requests. @@ -166,8 +178,9 @@ class Iterator(object): PAGE_TOKEN = 'pageToken' MAX_RESULTS = 'maxResults' RESERVED_PARAMS = frozenset([PAGE_TOKEN, MAX_RESULTS]) - PAGE_CLASS = Page PATH = None + ITEMS_KEY = 'items' + """The dictionary key used to retrieve items from each response.""" def __init__(self, client, page_token=None, max_results=None, extra_params=None, path=None): @@ -200,6 +213,7 @@ def page(self): :rtype: :class:`Page` :returns: The page of items that has been retrieved. 
""" + self._update_page() return self._page def __iter__(self): @@ -207,24 +221,38 @@ def __iter__(self): return self def _update_page(self): - """Replace the current page. + """Update the current page if needed. - Does nothing if the current page is non-null and has items - remaining. + Subclasses will need to implement this method if they + use data from the ``response`` other than the items. + :rtype: bool + :returns: Flag indicated if the page was updated. :raises: :class:`~exceptions.StopIteration` if there is no next page. """ - if self.page is not None and self.page.remaining > 0: - return - if self.has_next_page(): + if self._page is not None and self._page.remaining > 0: + return False + elif self.has_next_page(): response = self._get_next_page_response() - self._page = self.PAGE_CLASS(self, response) + self._page = Page(self, response, self.ITEMS_KEY) + return True else: raise StopIteration + def _item_to_value(self, item): + """Get the next item in the page. + + Subclasses will need to implement this method. + + :type item: dict + :param item: An item to be converted to a native object. + + :raises NotImplementedError: Always + """ + raise NotImplementedError + def next(self): """Get the next value in the iterator.""" - self._update_page() item = six.next(self.page) self.num_results += 1 return item diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 7f0a80b4e335..d788bcc486e7 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -25,41 +25,34 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_constructor(self): - klass = self._getTargetClass() parent = object() - response = {klass.ITEMS_KEY: (1, 2, 3)} - page = self._makeOne(parent, response) + items_key = 'potatoes' + response = {items_key: (1, 2, 3)} + page = self._makeOne(parent, response, items_key) self.assertIs(page._parent, parent) self.assertEqual(page._num_items, 3) self.assertEqual(page._remaining, 3) def test_num_items_property(self): - page = self._makeOne(None, {}) + page = self._makeOne(None, {}, '') num_items = 42 page._num_items = num_items self.assertEqual(page.num_items, num_items) def test_remaining_property(self): - page = self._makeOne(None, {}) + page = self._makeOne(None, {}, '') remaining = 1337 page._remaining = remaining self.assertEqual(page.remaining, remaining) def test___iter__(self): - page = self._makeOne(None, {}) + page = self._makeOne(None, {}, '') self.assertIs(iter(page), page) - def test__item_to_value(self): - page = self._makeOne(None, {}) - with self.assertRaises(NotImplementedError): - page._item_to_value(None) - def test_iterator_calls__item_to_value(self): import six - klass = self._getTargetClass() - - class CountItPage(klass): + class Parent(object): calls = 0 values = None @@ -68,20 +61,22 @@ def _item_to_value(self, item): self.calls += 1 return item - response = {klass.ITEMS_KEY: [10, 11, 12]} - page = CountItPage(None, response) + items_key = 'turkeys' + response = {items_key: [10, 11, 12]} + parent = Parent() + page = self._makeOne(parent, response, items_key) page._remaining = 100 - self.assertEqual(page.calls, 0) + self.assertEqual(parent.calls, 0) self.assertEqual(page.remaining, 100) self.assertEqual(six.next(page), 10) - self.assertEqual(page.calls, 1) + self.assertEqual(parent.calls, 1) self.assertEqual(page.remaining, 99) self.assertEqual(six.next(page), 11) - 
self.assertEqual(page.calls, 2) + self.assertEqual(parent.calls, 2) self.assertEqual(page.remaining, 98) self.assertEqual(six.next(page), 12) - self.assertEqual(page.calls, 3) + self.assertEqual(parent.calls, 3) self.assertEqual(page.remaining, 97) @@ -132,7 +127,6 @@ def test___iter__(self): def test_iterate(self): import six - from google.cloud.iterator import Page path = '/foo' key1 = 'key1' @@ -140,7 +134,9 @@ def test_iterate(self): item1, item2 = object(), object() ITEMS = {key1: item1, key2: item2} - class _Page(Page): + klass = self._getTargetClass() + + class WithItemToValue(klass): def _item_to_value(self, item): return ITEMS[item['name']] @@ -148,8 +144,7 @@ def _item_to_value(self, item): connection = _Connection( {'items': [{'name': key1}, {'name': key2}]}) client = _Client(connection) - iterator = self._makeOne(client, path=path) - iterator.PAGE_CLASS = _Page + iterator = WithItemToValue(client, path=path) self.assertEqual(iterator.num_results, 0) val1 = six.next(iterator) @@ -274,6 +269,11 @@ def test__get_next_page_response_new_no_token_in_response(self): self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) + def test__item_to_value_virtual(self): + iterator = self._makeOne(None) + with self.assertRaises(NotImplementedError): + iterator._item_to_value({}) + def test_reset(self): connection = _Connection() client = _Client(connection) @@ -287,7 +287,7 @@ def test_reset(self): self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.num_results, 0) self.assertIsNone(iterator.next_page_token) - self.assertIsNone(iterator.page) + self.assertIsNone(iterator._page) class _Connection(object): From a81f21731af2aef617dd1fd84847d222c198c85c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 14 Oct 2016 17:08:41 -0700 Subject: [PATCH 032/468] Adding public next_page() to Iterator. The previous implementation may catch users off guard since the iterator.page access may also update the value before access. In addition, this PR removed the _update_page() / next_page() subclass behavior in _BlobIterator. Over-riding that method was never intended. Instead makes a non-public class attribute _PAGE_CLASS that can be replaced with Page subclasses. This can be revisited if more implementations require custom behavior on Page creation / Page.__init__. --- .../google/cloud/iterator.py | 94 ++++++++++++++----- .../unit_tests/test_iterator.py | 57 +++++++++++ 2 files changed, 129 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 46107753c251..22566d8cc3b3 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -18,7 +18,7 @@ where the response is a list of results with a ``nextPageToken``. To make an iterator work, you may need to override the -``ITEMS_KEY`` class attribute so that given a response (containing a page of +``ITEMS_KEY`` class attribute so that a given response (containing a page of results) can be parsed into an iterable page of the actual objects you want:: class MyIterator(Iterator): @@ -71,6 +71,13 @@ def _item_to_value(self, item): manually:: >>> iterator = MyIterator(...) + >>> # No page of results before the iterator has started. + >>> iterator.page is None + True + >>> + >>> # Manually pull down the next page. 
+ >>> iterator.next_page() # Returns "updated" status of page + True >>> items = list(iterator.page) >>> items [ @@ -84,18 +91,34 @@ def _item_to_value(self, item): 3 >>> iterator.next_page_token 'eav1OzQB0OM8rLdGXOEsyQWSG' - >>> # And just do the same thing to consume the next page. + >>> + >>> iterator.next_page() + True >>> list(iterator.page) [ , , ] + >>> + >>> # When there are no more results + >>> iterator.next_page() + True + >>> iterator.page is google.cloud.iterator.NO_MORE_PAGES + True """ import six +NO_MORE_PAGES = object() +"""Sentinel object indicating an iterator has no more pages.""" +_NO_MORE_PAGES_ERR = 'Iterator has no more pages.' +_PAGE_ERR_TEMPLATE = ( + 'Tried to get next_page() while current page (%r) still has %d ' + 'items remaining.') + + class Page(object): """Single page of results in an iterator. @@ -123,7 +146,7 @@ def num_items(self): """Total items in the page. :rtype: int - :returns: The number of items in this page of items. + :returns: The number of items in this page. """ return self._num_items @@ -132,7 +155,7 @@ def remaining(self): """Remaining items in the page. :rtype: int - :returns: The number of items remaining this page. + :returns: The number of items remaining in this page. """ return self._remaining @@ -141,7 +164,7 @@ def __iter__(self): return self def next(self): - """Get the next value in the iterator.""" + """Get the next value in the page.""" item = six.next(self._item_iter) result = self._parent._item_to_value(item) # Since we've successfully got the next value from the @@ -159,7 +182,7 @@ class Iterator(object): Sub-classes need to over-write :attr:`ITEMS_KEY` and to define :meth:`_item_to_value`. - :type client: :class:`google.cloud.client.Client` + :type client: :class:`~google.cloud.client.Client` :param client: The client, which owns a connection to make requests. :type page_token: str @@ -168,7 +191,7 @@ class Iterator(object): :type max_results: int :param max_results: (Optional) The maximum number of results to fetch. - :type extra_params: dict or None + :type extra_params: :class:`dict` or :data:`None` :param extra_params: Extra query string parameters for the API call. :type path: str @@ -181,6 +204,7 @@ class Iterator(object): PATH = None ITEMS_KEY = 'items' """The dictionary key used to retrieve items from each response.""" + _PAGE_CLASS = Page def __init__(self, client, page_token=None, max_results=None, extra_params=None, path=None): @@ -213,31 +237,54 @@ def page(self): :rtype: :class:`Page` :returns: The page of items that has been retrieved. """ - self._update_page() return self._page def __iter__(self): """The :class:`Iterator` is an iterator.""" return self - def _update_page(self): - """Update the current page if needed. + def next_page(self, require_empty=True): + """Move to the next page in the result set. + + If the current page is not empty and ``require_empty`` is :data:`True` + then an exception will be raised. If the current page is not empty + and ``require_empty`` is :data:`False`, then this will return + without updating the current page (and will return an ``updated`` + value of :data:`False`). + + If the current page **is** empty, but there are no more results, + sets the current page to :attr:`NO_MORE_PAGES`. + + If the current page is :attr:`NO_MORE_PAGES`, throws an exception. - Subclasses will need to implement this method if they - use data from the ``response`` other than the items. 
+ :type require_empty: bool + :param require_empty: (Optional) Flag to indicate if the current page + must be empty before updating. :rtype: bool :returns: Flag indicated if the page was updated. - :raises: :class:`~exceptions.StopIteration` if there is no next page. + :raises ValueError: If ``require_empty`` is :data:`True` but the + current page is not empty. + :raises ValueError: If the current page is :attr:`NO_MORE_PAGES`. """ - if self._page is not None and self._page.remaining > 0: - return False - elif self.has_next_page(): - response = self._get_next_page_response() - self._page = Page(self, response, self.ITEMS_KEY) + if self._page is NO_MORE_PAGES: + raise ValueError(_NO_MORE_PAGES_ERR) + + # NOTE: This assumes Page.remaining can never go below 0. + page_empty = self._page is None or self._page.remaining == 0 + if page_empty: + if self.has_next_page(): + response = self._get_next_page_response() + self._page = self._PAGE_CLASS(self, response, self.ITEMS_KEY) + else: + self._page = NO_MORE_PAGES + return True else: - raise StopIteration + if require_empty: + msg = _PAGE_ERR_TEMPLATE % (self._page, self.page.remaining) + raise ValueError(msg) + return False def _item_to_value(self, item): """Get the next item in the page. @@ -252,7 +299,10 @@ def _item_to_value(self, item): raise NotImplementedError def next(self): - """Get the next value in the iterator.""" + """Get the next item from the request.""" + self.next_page(require_empty=False) + if self.page is NO_MORE_PAGES: + raise StopIteration item = six.next(self.page) self.num_results += 1 return item @@ -261,10 +311,10 @@ def next(self): __next__ = next def has_next_page(self): - """Determines whether or not this iterator has more pages. + """Determines whether or not there are more pages with results. :rtype: boolean - :returns: Whether the iterator has more pages or not. + :returns: Whether the iterator has more pages. """ if self.page_number == 0: return True diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index d788bcc486e7..0f979860147e 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -121,6 +121,63 @@ def test_constructor_w_extra_param_collision(self): with self.assertRaises(ValueError): self._makeOne(client, path=path, extra_params=extra_params) + def test_next_page_no_more(self): + from google.cloud.iterator import NO_MORE_PAGES + + iterator = self._makeOne(None) + iterator._page = NO_MORE_PAGES + with self.assertRaises(ValueError): + iterator.next_page() + + def test_next_page_not_empty_success(self): + from google.cloud.iterator import Page + + iterator = self._makeOne(None) + iterator._page = Page(None, {}, '') + iterator._page._remaining = 1 + updated = iterator.next_page(require_empty=False) + self.assertFalse(updated) + + def test_next_page_not_empty_fail(self): + from google.cloud.iterator import Page + + iterator = self._makeOne(None) + iterator._page = Page(None, {}, '') + iterator._page._remaining = 1 + with self.assertRaises(ValueError): + iterator.next_page(require_empty=True) + + def test_next_page_empty_then_no_more(self): + from google.cloud.iterator import NO_MORE_PAGES + + iterator = self._makeOne(None) + # Fake that there are no more pages. 
+ iterator.page_number = 1 + iterator.next_page_token = None + updated = iterator.next_page() + self.assertTrue(updated) + self.assertIs(iterator.page, NO_MORE_PAGES) + + def test_next_page_empty_then_another(self): + iterator = self._makeOne(None) + # Fake the next page class. + fake_page = object() + page_args = [] + + def dummy_response(): + return {} + + def dummy_page_class(*args): + page_args.append(args) + return fake_page + + iterator._get_next_page_response = dummy_response + iterator._PAGE_CLASS = dummy_page_class + updated = iterator.next_page() + self.assertTrue(updated) + self.assertIs(iterator.page, fake_page) + self.assertEqual(page_args, [(iterator, {}, iterator.ITEMS_KEY)]) + def test___iter__(self): iterator = self._makeOne(None, None) self.assertIs(iter(iterator), iterator) From 509a509e53807c385dc268a83dc5a3fae6e85989 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 16 Oct 2016 11:12:05 -0700 Subject: [PATCH 033/468] Removing NO_MORE_PAGES sentinel and just making None mean no more pages. Also renaming next_page() to update_page() on Iterator and dropping any return value from that method. Also throwing an AttributeError if the page is unset on @property access. --- .../google/cloud/iterator.py | 69 +++++++++---------- .../unit_tests/test_iterator.py | 51 ++++++++------ 2 files changed, 64 insertions(+), 56 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 22566d8cc3b3..3c23d1040e69 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -71,13 +71,8 @@ def _item_to_value(self, item): manually:: >>> iterator = MyIterator(...) - >>> # No page of results before the iterator has started. - >>> iterator.page is None - True - >>> - >>> # Manually pull down the next page. - >>> iterator.next_page() # Returns "updated" status of page - True + >>> # Manually pull down the first page. + >>> iterator.update_page() >>> items = list(iterator.page) >>> items [ @@ -92,8 +87,8 @@ def _item_to_value(self, item): >>> iterator.next_page_token 'eav1OzQB0OM8rLdGXOEsyQWSG' >>> - >>> iterator.next_page() - True + >>> # Ask for the next page to be grabbed. + >>> iterator.update_page() >>> list(iterator.page) [ , @@ -101,9 +96,8 @@ def _item_to_value(self, item): ] >>> >>> # When there are no more results - >>> iterator.next_page() - True - >>> iterator.page is google.cloud.iterator.NO_MORE_PAGES + >>> iterator.update_page() + >>> iterator.page is None True """ @@ -111,11 +105,13 @@ def _item_to_value(self, item): import six -NO_MORE_PAGES = object() -"""Sentinel object indicating an iterator has no more pages.""" +_UNSET = object() _NO_MORE_PAGES_ERR = 'Iterator has no more pages.' +_UNSTARTED_ERR = ( + 'Iterator has not been started. Either begin iterating, ' + 'call next(my_iter) or call my_iter.update_page().') _PAGE_ERR_TEMPLATE = ( - 'Tried to get next_page() while current page (%r) still has %d ' + 'Tried to update the page while current page (%r) still has %d ' 'items remaining.') @@ -191,11 +187,13 @@ class Iterator(object): :type max_results: int :param max_results: (Optional) The maximum number of results to fetch. - :type extra_params: :class:`dict` or :data:`None` - :param extra_params: Extra query string parameters for the API call. + :type extra_params: dict + :param extra_params: (Optional) Extra query string parameters for the + API call. :type path: str - :param path: The path to query for the list of items. 
+ :param path: (Optional) The path to query for the list of items. Defaults + to :attr:`PATH` on the current iterator class. """ PAGE_TOKEN = 'pageToken' @@ -217,7 +215,7 @@ def __init__(self, client, page_token=None, max_results=None, self.page_number = 0 self.next_page_token = page_token self.num_results = 0 - self._page = None + self._page = _UNSET def _verify_params(self): """Verifies the parameters don't use any reserved parameter. @@ -234,57 +232,56 @@ def _verify_params(self): def page(self): """The current page of results that has been retrieved. + If there are no more results, will return :data:`None`. + :rtype: :class:`Page` :returns: The page of items that has been retrieved. + :raises AttributeError: If the page has not been set. """ + if self._page is _UNSET: + raise AttributeError(_UNSTARTED_ERR) return self._page def __iter__(self): """The :class:`Iterator` is an iterator.""" return self - def next_page(self, require_empty=True): + def update_page(self, require_empty=True): """Move to the next page in the result set. If the current page is not empty and ``require_empty`` is :data:`True` then an exception will be raised. If the current page is not empty and ``require_empty`` is :data:`False`, then this will return - without updating the current page (and will return an ``updated`` - value of :data:`False`). + without updating the current page. If the current page **is** empty, but there are no more results, - sets the current page to :attr:`NO_MORE_PAGES`. + sets the current page to :data:`None`. - If the current page is :attr:`NO_MORE_PAGES`, throws an exception. + If there are no more pages, throws an exception. :type require_empty: bool :param require_empty: (Optional) Flag to indicate if the current page must be empty before updating. - :rtype: bool - :returns: Flag indicated if the page was updated. :raises ValueError: If ``require_empty`` is :data:`True` but the current page is not empty. - :raises ValueError: If the current page is :attr:`NO_MORE_PAGES`. + :raises ValueError: If there are no more pages. """ - if self._page is NO_MORE_PAGES: + if self._page is None: raise ValueError(_NO_MORE_PAGES_ERR) # NOTE: This assumes Page.remaining can never go below 0. - page_empty = self._page is None or self._page.remaining == 0 + page_empty = self._page is _UNSET or self._page.remaining == 0 if page_empty: if self.has_next_page(): response = self._get_next_page_response() self._page = self._PAGE_CLASS(self, response, self.ITEMS_KEY) else: - self._page = NO_MORE_PAGES - - return True + self._page = None else: if require_empty: msg = _PAGE_ERR_TEMPLATE % (self._page, self.page.remaining) raise ValueError(msg) - return False def _item_to_value(self, item): """Get the next item in the page. 
@@ -300,8 +297,8 @@ def _item_to_value(self, item): def next(self): """Get the next item from the request.""" - self.next_page(require_empty=False) - if self.page is NO_MORE_PAGES: + self.update_page(require_empty=False) + if self.page is None: raise StopIteration item = six.next(self.page) self.num_results += 1 @@ -359,4 +356,4 @@ def reset(self): self.page_number = 0 self.next_page_token = None self.num_results = 0 - self._page = None + self._page = _UNSET diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 0f979860147e..6b3afaaf6172 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -121,44 +121,54 @@ def test_constructor_w_extra_param_collision(self): with self.assertRaises(ValueError): self._makeOne(client, path=path, extra_params=extra_params) - def test_next_page_no_more(self): - from google.cloud.iterator import NO_MORE_PAGES + def test_page_property(self): + iterator = self._makeOne(None) + page = object() + iterator._page = page + self.assertIs(iterator.page, page) + + def test_page_property_unset(self): + from google.cloud.iterator import _UNSET + + iterator = self._makeOne(None) + self.assertIs(iterator._page, _UNSET) + with self.assertRaises(AttributeError): + getattr(iterator, 'page') + def test_update_page_no_more(self): iterator = self._makeOne(None) - iterator._page = NO_MORE_PAGES + iterator._page = None with self.assertRaises(ValueError): - iterator.next_page() + iterator.update_page() - def test_next_page_not_empty_success(self): + def test_update_page_not_empty_success(self): from google.cloud.iterator import Page iterator = self._makeOne(None) - iterator._page = Page(None, {}, '') + page = Page(None, {}, '') + iterator._page = page iterator._page._remaining = 1 - updated = iterator.next_page(require_empty=False) - self.assertFalse(updated) + iterator.update_page(require_empty=False) + self.assertIs(iterator._page, page) - def test_next_page_not_empty_fail(self): + def test_update_page_not_empty_fail(self): from google.cloud.iterator import Page iterator = self._makeOne(None) iterator._page = Page(None, {}, '') iterator._page._remaining = 1 with self.assertRaises(ValueError): - iterator.next_page(require_empty=True) - - def test_next_page_empty_then_no_more(self): - from google.cloud.iterator import NO_MORE_PAGES + iterator.update_page(require_empty=True) + def test_update_page_empty_then_no_more(self): iterator = self._makeOne(None) # Fake that there are no more pages. iterator.page_number = 1 iterator.next_page_token = None - updated = iterator.next_page() - self.assertTrue(updated) - self.assertIs(iterator.page, NO_MORE_PAGES) + iterator.update_page() + self.assertIsNone(iterator.page) - def test_next_page_empty_then_another(self): + def test_update_page_empty_then_another(self): iterator = self._makeOne(None) # Fake the next page class. 
fake_page = object() @@ -173,8 +183,7 @@ def dummy_page_class(*args): iterator._get_next_page_response = dummy_response iterator._PAGE_CLASS = dummy_page_class - updated = iterator.next_page() - self.assertTrue(updated) + iterator.update_page() self.assertIs(iterator.page, fake_page) self.assertEqual(page_args, [(iterator, {}, iterator.ITEMS_KEY)]) @@ -332,6 +341,8 @@ def test__item_to_value_virtual(self): iterator._item_to_value({}) def test_reset(self): + from google.cloud.iterator import _UNSET + connection = _Connection() client = _Client(connection) path = '/foo' @@ -344,7 +355,7 @@ def test_reset(self): self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.num_results, 0) self.assertIsNone(iterator.next_page_token) - self.assertIsNone(iterator._page) + self.assertIs(iterator._page, _UNSET) class _Connection(object): From d22d58c463b0ded35d78ab2ee6f6d6e173c513e3 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 16 Oct 2016 11:14:08 -0700 Subject: [PATCH 034/468] Making Iterator.has_next_page() non-public. This is because Iterator.page combined with Iterator.update_page() can provide the same thing and has_next_page() is really an implementation detail. Done via: $ git grep -l has_next_page | > xargs sed -i s/has_next_page/_has_next_page/g --- .../google/cloud/iterator.py | 4 ++-- .../unit_tests/test_iterator.py | 20 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 3c23d1040e69..80aebff37f37 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -273,7 +273,7 @@ def update_page(self, require_empty=True): # NOTE: This assumes Page.remaining can never go below 0. page_empty = self._page is _UNSET or self._page.remaining == 0 if page_empty: - if self.has_next_page(): + if self._has_next_page(): response = self._get_next_page_response() self._page = self._PAGE_CLASS(self, response, self.ITEMS_KEY) else: @@ -307,7 +307,7 @@ def next(self): # Alias needed for Python 2/3 support. __next__ = next - def has_next_page(self): + def _has_next_page(self): """Determines whether or not there are more pages with results. 
:rtype: boolean diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 6b3afaaf6172..b5c224fcec7a 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -229,22 +229,22 @@ def _item_to_value(self, item): self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) - def test_has_next_page_new(self): + def test__has_next_page_new(self): connection = _Connection() client = _Client(connection) path = '/foo' iterator = self._makeOne(client, path=path) - self.assertTrue(iterator.has_next_page()) + self.assertTrue(iterator._has_next_page()) - def test_has_next_page_w_number_no_token(self): + def test__has_next_page_w_number_no_token(self): connection = _Connection() client = _Client(connection) path = '/foo' iterator = self._makeOne(client, path=path) iterator.page_number = 1 - self.assertFalse(iterator.has_next_page()) + self.assertFalse(iterator._has_next_page()) - def test_has_next_page_w_number_w_token(self): + def test__has_next_page_w_number_w_token(self): connection = _Connection() client = _Client(connection) path = '/foo' @@ -252,20 +252,20 @@ def test_has_next_page_w_number_w_token(self): iterator = self._makeOne(client, path=path) iterator.page_number = 1 iterator.next_page_token = token - self.assertTrue(iterator.has_next_page()) + self.assertTrue(iterator._has_next_page()) - def test_has_next_page_w_max_results_not_done(self): + def test__has_next_page_w_max_results_not_done(self): iterator = self._makeOne(None, path=None, max_results=3, page_token='definitely-not-none') iterator.page_number = 1 self.assertLess(iterator.num_results, iterator.max_results) - self.assertTrue(iterator.has_next_page()) + self.assertTrue(iterator._has_next_page()) - def test_has_next_page_w_max_results_done(self): + def test__has_next_page_w_max_results_done(self): iterator = self._makeOne(None, None, max_results=3) iterator.page_number = 1 iterator.num_results = iterator.max_results - self.assertFalse(iterator.has_next_page()) + self.assertFalse(iterator._has_next_page()) def test__get_query_params_no_token(self): connection = _Connection() From ac064ad44f842f5bc79c272b3107f510fbd7b237 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 17 Oct 2016 17:50:03 -0700 Subject: [PATCH 035/468] Removing Iterator and Page subclasses. Instead require `Iterator` takes: - a well-formed path for the request - a callable to convert a JSON item to native obj. - (optional) the key in a response holding all items - (optional) a `page_start` (acts as proxy for `Page.__init__`) --- .../google/cloud/iterator.py | 168 ++++++++++++------ .../unit_tests/test_iterator.py | 97 +++++----- 2 files changed, 161 insertions(+), 104 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 80aebff37f37..d746bc5847d8 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -17,29 +17,27 @@ These iterators simplify the process of paging through API responses where the response is a list of results with a ``nextPageToken``. 
-To make an iterator work, you may need to override the -``ITEMS_KEY`` class attribute so that a given response (containing a page of -results) can be parsed into an iterable page of the actual objects you want:: - - class MyIterator(Iterator): - - ITEMS_KEY = 'blocks' - - def _item_to_value(self, item): - my_item = MyItemClass(other_arg=True) - my_item._set_properties(item) - return my_item - -You then can use this to get **all** the results from a resource:: - - >>> iterator = MyIterator(...) +To make an iterator work, you'll need to provide a way to convert a JSON +item returned from the API into the object of your choice (via +``item_to_value``). You also may need to specify a custom ``items_key`` so +that a given response (containing a page of results) can be parsed into an +iterable page of the actual objects you want. You then can use this to get +**all** the results from a resource:: + + >>> def item_to_value(iterator, item): + ... my_item = MyItemClass(iterator.client, other_arg=True) + ... my_item._set_properties(item) + ... return my_item + ... + >>> iterator = Iterator(..., items_key='blocks', + ... item_to_value=item_to_value) >>> list(iterator) # Convert to a list (consumes all values). Or you can walk your way through items and call off the search early if you find what you're looking for (resulting in possibly fewer requests):: - >>> for my_item in MyIterator(...): + >>> for my_item in Iterator(...): ... print(my_item.name) ... if not my_item.is_valid: ... break @@ -47,7 +45,7 @@ def _item_to_value(self, item): When iterating, not every new item will send a request to the server. To monitor these requests, track the current page of the iterator:: - >>> iterator = MyIterator(...) + >>> iterator = Iterator(...) >>> iterator.page_number 0 >>> next(iterator) @@ -58,6 +56,8 @@ def _item_to_value(self, item): 1 >>> next(iterator) + >>> iterator.page_number + 1 >>> iterator.page.remaining 0 >>> next(iterator) @@ -70,7 +70,7 @@ def _item_to_value(self, item): It's also possible to consume an entire page and handle the paging process manually:: - >>> iterator = MyIterator(...) + >>> iterator = Iterator(...) >>> # Manually pull down the first page. >>> iterator.update_page() >>> items = list(iterator.page) @@ -96,6 +96,8 @@ def _item_to_value(self, item): ] >>> >>> # When there are no more results + >>> iterator.next_page_token is None + True >>> iterator.update_page() >>> iterator.page is None True @@ -113,6 +115,43 @@ def _item_to_value(self, item): _PAGE_ERR_TEMPLATE = ( 'Tried to update the page while current page (%r) still has %d ' 'items remaining.') +DEFAULT_ITEMS_KEY = 'items' +"""The dictionary key used to retrieve items from each response.""" + + +# pylint: disable=unused-argument +def _not_implemented_item_to_value(iterator, item): + """Helper to convert an item into the native object. + + This is a virtual stand-in as the default value, effectively + causing callers to pass in their own callable. + + :type iterator: :class:`Iterator` + :param iterator: An iterator that holds some request info. + + :type item: dict + :param item: A JSON object to be converted into a native object. + + :raises NotImplementedError: Always. + """ + raise NotImplementedError + + +def _do_nothing_page_start(iterator, page, response): + """Helper to provide custom behavior after a :class:`Page` is started. + + This is a do-nothing stand-in as the default value. + + :type iterator: :class:`Iterator` + :param iterator: An iterator that holds some request info. 
+ + :type page: :class:`Page` + :param page: The page that was just created. + + :type response: dict + :param response: The JSON API response for a page. + """ +# pylint: enable=unused-argument class Page(object): @@ -127,15 +166,21 @@ class Page(object): :type items_key: str :param items_key: The dictionary key used to retrieve items from the response. + + :type item_to_value: callable + :param item_to_value: Callable to convert an item from JSON + into the native object. Assumed signature + takes an :class:`Iterator` and a dictionary + holding a single item. """ - def __init__(self, parent, response, items_key): + def __init__(self, parent, response, items_key, item_to_value): self._parent = parent items = response.get(items_key, ()) self._num_items = len(items) self._remaining = self._num_items self._item_iter = iter(items) - self.response = response + self._item_to_value = item_to_value @property def num_items(self): @@ -162,7 +207,7 @@ def __iter__(self): def next(self): """Get the next value in the page.""" item = six.next(self._item_iter) - result = self._parent._item_to_value(item) + result = self._item_to_value(self._parent, item) # Since we've successfully got the next value from the # iterator, we update the number of remaining. self._remaining -= 1 @@ -175,12 +220,23 @@ def next(self): class Iterator(object): """A generic class for iterating through Cloud JSON APIs list responses. - Sub-classes need to over-write :attr:`ITEMS_KEY` and to define - :meth:`_item_to_value`. - :type client: :class:`~google.cloud.client.Client` :param client: The client, which owns a connection to make requests. + :type path: str + :param path: The path to query for the list of items. Defaults + to :attr:`PATH` on the current iterator class. + + :type items_key: str + :param items_key: The key used to grab retrieved items from an API + response. Defaults to :data:`DEFAULT_ITEMS_KEY`. + + :type item_to_value: callable + :param item_to_value: (Optional) Callable to convert an item from JSON + into the native object. Assumed signature + takes an :class:`Iterator` and a dictionary + holding a single item. + :type page_token: str :param page_token: (Optional) A token identifying a page in a result set. @@ -191,26 +247,32 @@ class Iterator(object): :param extra_params: (Optional) Extra query string parameters for the API call. - :type path: str - :param path: (Optional) The path to query for the list of items. Defaults - to :attr:`PATH` on the current iterator class. + :type page_start: callable + :param page_start: (Optional) Callable to provide any special behavior + after a new page has been created. Assumed signature + takes the :class:`Iterator` that started the page, + the :class:`Page` that was started and the dictionary + containing the page response. 
""" - PAGE_TOKEN = 'pageToken' - MAX_RESULTS = 'maxResults' - RESERVED_PARAMS = frozenset([PAGE_TOKEN, MAX_RESULTS]) - PATH = None - ITEMS_KEY = 'items' - """The dictionary key used to retrieve items from each response.""" - _PAGE_CLASS = Page - - def __init__(self, client, page_token=None, max_results=None, - extra_params=None, path=None): - self.extra_params = extra_params or {} - self._verify_params() - self.max_results = max_results + _PAGE_TOKEN = 'pageToken' + _MAX_RESULTS = 'maxResults' + _RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS]) + + def __init__(self, client, path, items_key=DEFAULT_ITEMS_KEY, + item_to_value=_not_implemented_item_to_value, + page_token=None, max_results=None, extra_params=None, + page_start=_do_nothing_page_start): self.client = client - self.path = path or self.PATH + self.path = path + self._items_key = items_key + self._item_to_value = item_to_value + self.max_results = max_results + self.extra_params = extra_params + self._page_start = page_start + if self.extra_params is None: + self.extra_params = {} + self._verify_params() # The attributes below will change over the life of the iterator. self.page_number = 0 self.next_page_token = page_token @@ -222,7 +284,7 @@ def _verify_params(self): :raises ValueError: If a reserved parameter is used. """ - reserved_in_use = self.RESERVED_PARAMS.intersection( + reserved_in_use = self._RESERVED_PARAMS.intersection( self.extra_params) if reserved_in_use: raise ValueError('Using a reserved parameter', @@ -275,7 +337,9 @@ def update_page(self, require_empty=True): if page_empty: if self._has_next_page(): response = self._get_next_page_response() - self._page = self._PAGE_CLASS(self, response, self.ITEMS_KEY) + self._page = Page(self, response, self._items_key, + self._item_to_value) + self._page_start(self, self._page, response) else: self._page = None else: @@ -283,18 +347,6 @@ def update_page(self, require_empty=True): msg = _PAGE_ERR_TEMPLATE % (self._page, self.page.remaining) raise ValueError(msg) - def _item_to_value(self, item): - """Get the next item in the page. - - Subclasses will need to implement this method. - - :type item: dict - :param item: An item to be converted to a native object. 
- - :raises NotImplementedError: Always - """ - raise NotImplementedError - def next(self): """Get the next item from the request.""" self.update_page(require_empty=False) @@ -330,9 +382,9 @@ def _get_query_params(self): """ result = {} if self.next_page_token is not None: - result[self.PAGE_TOKEN] = self.next_page_token + result[self._PAGE_TOKEN] = self.next_page_token if self.max_results is not None: - result[self.MAX_RESULTS] = self.max_results - self.num_results + result[self._MAX_RESULTS] = self.max_results - self.num_results result.update(self.extra_params) return result diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index b5c224fcec7a..08fb609822cb 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -15,6 +15,28 @@ import unittest +class Test__not_implemented_item_to_value(unittest.TestCase): + + def _callFUT(self, iterator, item): + from google.cloud.iterator import _not_implemented_item_to_value + return _not_implemented_item_to_value(iterator, item) + + def test_virtual(self): + with self.assertRaises(NotImplementedError): + self._callFUT(None, None) + + +class Test__do_nothing_page_start(unittest.TestCase): + + def _callFUT(self, iterator, page, response): + from google.cloud.iterator import _do_nothing_page_start + return _do_nothing_page_start(iterator, page, response) + + def test_do_nothing(self): + result = self._callFUT(None, None, None) + self.assertIsNone(result) + + class TestPage(unittest.TestCase): def _getTargetClass(self): @@ -28,25 +50,25 @@ def test_constructor(self): parent = object() items_key = 'potatoes' response = {items_key: (1, 2, 3)} - page = self._makeOne(parent, response, items_key) + page = self._makeOne(parent, response, items_key, None) self.assertIs(page._parent, parent) self.assertEqual(page._num_items, 3) self.assertEqual(page._remaining, 3) def test_num_items_property(self): - page = self._makeOne(None, {}, '') + page = self._makeOne(None, {}, '', None) num_items = 42 page._num_items = num_items self.assertEqual(page.num_items, num_items) def test_remaining_property(self): - page = self._makeOne(None, {}, '') + page = self._makeOne(None, {}, '', None) remaining = 1337 page._remaining = remaining self.assertEqual(page.remaining, remaining) def test___iter__(self): - page = self._makeOne(None, {}, '') + page = self._makeOne(None, {}, '', None) self.assertIs(iter(page), page) def test_iterator_calls__item_to_value(self): @@ -55,16 +77,16 @@ def test_iterator_calls__item_to_value(self): class Parent(object): calls = 0 - values = None - def _item_to_value(self, item): + def item_to_value(self, item): self.calls += 1 return item items_key = 'turkeys' response = {items_key: [10, 11, 12]} parent = Parent() - page = self._makeOne(parent, response, items_key) + page = self._makeOne(parent, response, items_key, + Parent.item_to_value) page._remaining = 100 self.assertEqual(parent.calls, 0) @@ -93,26 +115,12 @@ def test_constructor(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path) self.assertIs(iterator.client, client) self.assertEqual(iterator.path, path) self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) - def test_constructor_default_path(self): - klass = self._getTargetClass() - - class WithPath(klass): - PATH = '/path' - - connection = _Connection() - 
client = _Client(connection) - iterator = WithPath(client) - self.assertIs(iterator.client, client) - self.assertEqual(iterator.path, WithPath.PATH) - self.assertEqual(iterator.page_number, 0) - self.assertIsNone(iterator.next_page_token) - def test_constructor_w_extra_param_collision(self): connection = _Connection() client = _Client(connection) @@ -122,7 +130,7 @@ def test_constructor_w_extra_param_collision(self): self._makeOne(client, path=path, extra_params=extra_params) def test_page_property(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) page = object() iterator._page = page self.assertIs(iterator.page, page) @@ -130,13 +138,13 @@ def test_page_property(self): def test_page_property_unset(self): from google.cloud.iterator import _UNSET - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertIs(iterator._page, _UNSET) with self.assertRaises(AttributeError): getattr(iterator, 'page') def test_update_page_no_more(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) iterator._page = None with self.assertRaises(ValueError): iterator.update_page() @@ -144,8 +152,8 @@ def test_update_page_no_more(self): def test_update_page_not_empty_success(self): from google.cloud.iterator import Page - iterator = self._makeOne(None) - page = Page(None, {}, '') + iterator = self._makeOne(None, None) + page = Page(None, {}, '', None) iterator._page = page iterator._page._remaining = 1 iterator.update_page(require_empty=False) @@ -154,14 +162,14 @@ def test_update_page_not_empty_success(self): def test_update_page_not_empty_fail(self): from google.cloud.iterator import Page - iterator = self._makeOne(None) - iterator._page = Page(None, {}, '') + iterator = self._makeOne(None, None) + iterator._page = Page(None, {}, '', None) iterator._page._remaining = 1 with self.assertRaises(ValueError): iterator.update_page(require_empty=True) def test_update_page_empty_then_no_more(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) # Fake that there are no more pages. iterator.page_number = 1 iterator.next_page_token = None @@ -169,7 +177,11 @@ def test_update_page_empty_then_no_more(self): self.assertIsNone(iterator.page) def test_update_page_empty_then_another(self): - iterator = self._makeOne(None) + from google.cloud._testing import _Monkey + from google.cloud import iterator as MUT + + items_key = 'its-key' + iterator = self._makeOne(None, None, items_key=items_key) # Fake the next page class. 
fake_page = object() page_args = [] @@ -182,10 +194,11 @@ def dummy_page_class(*args): return fake_page iterator._get_next_page_response = dummy_response - iterator._PAGE_CLASS = dummy_page_class - iterator.update_page() + with _Monkey(MUT, Page=dummy_page_class): + iterator.update_page() self.assertIs(iterator.page, fake_page) - self.assertEqual(page_args, [(iterator, {}, iterator.ITEMS_KEY)]) + self.assertEqual( + page_args, [(iterator, {}, items_key, iterator._item_to_value)]) def test___iter__(self): iterator = self._makeOne(None, None) @@ -200,17 +213,14 @@ def test_iterate(self): item1, item2 = object(), object() ITEMS = {key1: item1, key2: item2} - klass = self._getTargetClass() - - class WithItemToValue(klass): - - def _item_to_value(self, item): - return ITEMS[item['name']] + def item_to_value(iterator, item): # pylint: disable=unused-argument + return ITEMS[item['name']] connection = _Connection( {'items': [{'name': key1}, {'name': key2}]}) client = _Client(connection) - iterator = WithItemToValue(client, path=path) + iterator = self._makeOne(client, path=path, + item_to_value=item_to_value) self.assertEqual(iterator.num_results, 0) val1 = six.next(iterator) @@ -335,11 +345,6 @@ def test__get_next_page_response_new_no_token_in_response(self): self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) - def test__item_to_value_virtual(self): - iterator = self._makeOne(None) - with self.assertRaises(NotImplementedError): - iterator._item_to_value({}) - def test_reset(self): from google.cloud.iterator import _UNSET From 5c468ae68a088578bfce536ddd8d6e2e5db6816b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 18 Oct 2016 12:24:58 -0700 Subject: [PATCH 036/468] Making item_to_value a required argument. Also adding "(Optional)" to items_key docstring. --- .../google/cloud/iterator.py | 31 +++------- .../unit_tests/test_iterator.py | 57 ++++++++----------- 2 files changed, 31 insertions(+), 57 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index d746bc5847d8..2deb9ebbe703 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -120,23 +120,6 @@ # pylint: disable=unused-argument -def _not_implemented_item_to_value(iterator, item): - """Helper to convert an item into the native object. - - This is a virtual stand-in as the default value, effectively - causing callers to pass in their own callable. - - :type iterator: :class:`Iterator` - :param iterator: An iterator that holds some request info. - - :type item: dict - :param item: A JSON object to be converted into a native object. - - :raises NotImplementedError: Always. - """ - raise NotImplementedError - - def _do_nothing_page_start(iterator, page, response): """Helper to provide custom behavior after a :class:`Page` is started. @@ -227,16 +210,16 @@ class Iterator(object): :param path: The path to query for the list of items. Defaults to :attr:`PATH` on the current iterator class. - :type items_key: str - :param items_key: The key used to grab retrieved items from an API - response. Defaults to :data:`DEFAULT_ITEMS_KEY`. - :type item_to_value: callable - :param item_to_value: (Optional) Callable to convert an item from JSON + :param item_to_value: Callable to convert an item from JSON into the native object. Assumed signature takes an :class:`Iterator` and a dictionary holding a single item. 
+ :type items_key: str + :param items_key: (Optional) The key used to grab retrieved items from an + API response. Defaults to :data:`DEFAULT_ITEMS_KEY`. + :type page_token: str :param page_token: (Optional) A token identifying a page in a result set. @@ -259,8 +242,8 @@ class Iterator(object): _MAX_RESULTS = 'maxResults' _RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS]) - def __init__(self, client, path, items_key=DEFAULT_ITEMS_KEY, - item_to_value=_not_implemented_item_to_value, + def __init__(self, client, path, item_to_value, + items_key=DEFAULT_ITEMS_KEY, page_token=None, max_results=None, extra_params=None, page_start=_do_nothing_page_start): self.client = client diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 08fb609822cb..81a3738ffc1f 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -15,17 +15,6 @@ import unittest -class Test__not_implemented_item_to_value(unittest.TestCase): - - def _callFUT(self, iterator, item): - from google.cloud.iterator import _not_implemented_item_to_value - return _not_implemented_item_to_value(iterator, item) - - def test_virtual(self): - with self.assertRaises(NotImplementedError): - self._callFUT(None, None) - - class Test__do_nothing_page_start(unittest.TestCase): def _callFUT(self, iterator, page, response): @@ -115,7 +104,7 @@ def test_constructor(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path) + iterator = self._makeOne(client, path, None) self.assertIs(iterator.client, client) self.assertEqual(iterator.path, path) self.assertEqual(iterator.page_number, 0) @@ -127,10 +116,10 @@ def test_constructor_w_extra_param_collision(self): path = '/foo' extra_params = {'pageToken': 'val'} with self.assertRaises(ValueError): - self._makeOne(client, path=path, extra_params=extra_params) + self._makeOne(client, path, None, extra_params=extra_params) def test_page_property(self): - iterator = self._makeOne(None, None) + iterator = self._makeOne(None, None, None) page = object() iterator._page = page self.assertIs(iterator.page, page) @@ -138,13 +127,13 @@ def test_page_property(self): def test_page_property_unset(self): from google.cloud.iterator import _UNSET - iterator = self._makeOne(None, None) + iterator = self._makeOne(None, None, None) self.assertIs(iterator._page, _UNSET) with self.assertRaises(AttributeError): getattr(iterator, 'page') def test_update_page_no_more(self): - iterator = self._makeOne(None, None) + iterator = self._makeOne(None, None, None) iterator._page = None with self.assertRaises(ValueError): iterator.update_page() @@ -152,7 +141,7 @@ def test_update_page_no_more(self): def test_update_page_not_empty_success(self): from google.cloud.iterator import Page - iterator = self._makeOne(None, None) + iterator = self._makeOne(None, None, None) page = Page(None, {}, '', None) iterator._page = page iterator._page._remaining = 1 @@ -162,14 +151,14 @@ def test_update_page_not_empty_success(self): def test_update_page_not_empty_fail(self): from google.cloud.iterator import Page - iterator = self._makeOne(None, None) + iterator = self._makeOne(None, None, None) iterator._page = Page(None, {}, '', None) iterator._page._remaining = 1 with self.assertRaises(ValueError): iterator.update_page(require_empty=True) def test_update_page_empty_then_no_more(self): - iterator = self._makeOne(None, None) + iterator = 
self._makeOne(None, None, None) # Fake that there are no more pages. iterator.page_number = 1 iterator.next_page_token = None @@ -181,7 +170,7 @@ def test_update_page_empty_then_another(self): from google.cloud import iterator as MUT items_key = 'its-key' - iterator = self._makeOne(None, None, items_key=items_key) + iterator = self._makeOne(None, None, None, items_key=items_key) # Fake the next page class. fake_page = object() page_args = [] @@ -201,7 +190,7 @@ def dummy_page_class(*args): page_args, [(iterator, {}, items_key, iterator._item_to_value)]) def test___iter__(self): - iterator = self._makeOne(None, None) + iterator = self._makeOne(None, None, None) self.assertIs(iter(iterator), iterator) def test_iterate(self): @@ -243,14 +232,14 @@ def test__has_next_page_new(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) self.assertTrue(iterator._has_next_page()) def test__has_next_page_w_number_no_token(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) iterator.page_number = 1 self.assertFalse(iterator._has_next_page()) @@ -259,20 +248,20 @@ def test__has_next_page_w_number_w_token(self): client = _Client(connection) path = '/foo' token = 'token' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) iterator.page_number = 1 iterator.next_page_token = token self.assertTrue(iterator._has_next_page()) def test__has_next_page_w_max_results_not_done(self): - iterator = self._makeOne(None, path=None, max_results=3, + iterator = self._makeOne(None, None, None, max_results=3, page_token='definitely-not-none') iterator.page_number = 1 self.assertLess(iterator.num_results, iterator.max_results) self.assertTrue(iterator._has_next_page()) def test__has_next_page_w_max_results_done(self): - iterator = self._makeOne(None, None, max_results=3) + iterator = self._makeOne(None, None, None, max_results=3) iterator.page_number = 1 iterator.num_results = iterator.max_results self.assertFalse(iterator._has_next_page()) @@ -281,7 +270,7 @@ def test__get_query_params_no_token(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) self.assertEqual(iterator._get_query_params(), {}) def test__get_query_params_w_token(self): @@ -289,7 +278,7 @@ def test__get_query_params_w_token(self): client = _Client(connection) path = '/foo' token = 'token' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) iterator.next_page_token = token self.assertEqual(iterator._get_query_params(), {'pageToken': token}) @@ -299,7 +288,7 @@ def test__get_query_params_w_max_results(self): client = _Client(connection) path = '/foo' max_results = 3 - iterator = self._makeOne(client, path=path, + iterator = self._makeOne(client, path, None, max_results=max_results) iterator.num_results = 1 local_max = max_results - iterator.num_results @@ -311,7 +300,8 @@ def test__get_query_params_extra_params(self): client = _Client(connection) path = '/foo' extra_params = {'key': 'val'} - iterator = self._makeOne(client, path=path, extra_params=extra_params) + iterator = self._makeOne(client, path, None, + extra_params=extra_params) self.assertEqual(iterator._get_query_params(), extra_params) def 
test__get_query_params_w_token_and_extra_params(self): @@ -320,7 +310,8 @@ def test__get_query_params_w_token_and_extra_params(self): connection = _Connection() client = _Client(connection) path = '/foo' token = 'token' extra_params = {'key': 'val'} - iterator = self._makeOne(client, path=path, extra_params=extra_params) + iterator = self._makeOne(client, path, None, + extra_params=extra_params) iterator.next_page_token = token expected_query = extra_params.copy() @@ -335,7 +326,7 @@ def test__get_next_page_response_new_no_token_in_response(self): connection = _Connection({'items': [{'name': key1}, {'name': key2}], 'nextPageToken': token}) client = _Client(connection) - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) response = iterator._get_next_page_response() self.assertEqual(response['items'], [{'name': key1}, {'name': key2}]) self.assertEqual(iterator.page_number, 1) @@ -352,7 +343,7 @@ def test_reset(self): client = _Client(connection) path = '/foo' token = 'token' - iterator = self._makeOne(client, path=path) + iterator = self._makeOne(client, path, None) iterator.page_number = 1 iterator.next_page_token = token iterator._page = object() From ee6f460d112d152c1e51b244395618e9b39cb846 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 20 Oct 2016 11:42:19 -0400 Subject: [PATCH 037/468] Add Gateway Timeout exception. --- packages/google-cloud-core/google/cloud/exceptions.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index 6a4b8706917a..5fa75abc54a4 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -177,6 +177,11 @@ class ServiceUnavailable(ServerError): code = 503 +class GatewayTimeout(ServerError): + """Exception mapping a '504 Gateway Timeout' response.""" + code = 504 + + def make_exception(response, content, error_info=None, use_json=True): """Factory: create exception based on HTTP response code. From 83134725560859df301ee7c5575c88bf68d3f7e9 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:34:03 -0700 Subject: [PATCH 038/468] Replace string with str in rtypes. Used the command: ag -l 'rtype: string' | xargs sed -i .bak 's/rtype: string/rtype: str/g' Based on this comment: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2485#discussion_r83267163 `str` is a type, `string` is a module. --- packages/google-cloud-core/google/cloud/_helpers.py | 2 +- packages/google-cloud-core/google/cloud/connection.py | 2 +- packages/google-cloud-core/google/cloud/credentials.py | 2 +- .../google/cloud/streaming/transfer.py | 10 +++++----- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 0b515cc5ce9d..3d0a58916737 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -526,7 +526,7 @@ def _pb_timestamp_to_rfc3339(timestamp_pb): :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp` :param timestamp_pb: A Google returned timestamp protobuf. - :rtype: string + :rtype: str :returns: An RFC 3339 formatted timestamp string.
""" timestamp = _pb_timestamp_to_datetime(timestamp_pb) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index eca7be400bfe..be0f64059b36 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -179,7 +179,7 @@ def build_api_url(cls, path, query_params=None, Typically you shouldn't provide this and instead use the default for the library. - :rtype: string + :rtype: str :returns: The URL assembled from the pieces provided. """ url = cls.API_URL_TEMPLATE.format( diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index 61f918856b9c..6f57a4c2481a 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -220,7 +220,7 @@ def generate_signed_url(credentials, resource, expiration, :param generation: (Optional) A value that indicates which generation of the resource to fetch. - :rtype: string + :rtype: str :returns: A signed URL you can use to access the resource until expiration. """ diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index 196a388f3237..e27fcd1bfe5a 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -163,7 +163,7 @@ def stream(self): def url(self): """URL to / from which data is downloaded/uploaded. - :rtype: string + :rtype: str :returns: The URL where data is sent/received. """ return self._url @@ -318,7 +318,7 @@ def total_size(self): def encoding(self): """'Content-Encoding' used to transfer the file - :rtype: string or None + :rtype: str or None :returns: The encoding of the downloaded content. """ return self._encoding @@ -732,7 +732,7 @@ def complete(self): def mime_type(self): """MIMEtype of the file being uploaded. - :rtype: string + :rtype: str :returns: The mime-type of the upload. """ return self._mime_type @@ -750,7 +750,7 @@ def progress(self): def strategy(self): """Upload strategy to use - :rtype: string or None + :rtype: str or None :returns: The strategy used to upload the data. """ return self._strategy @@ -972,7 +972,7 @@ def _get_range_header(response): :type response: :class:`google.cloud.streaming.http_wrapper.Response` :param response: response to be queried - :rtype: string + :rtype: str :returns: The header used to determine the bytes range. """ # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header, From a83e53965df04283e9dad9fe7f9580d2a516fc63 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:50:55 -0700 Subject: [PATCH 039/468] Replace types string with str. Uses command: ag -l 'type ([^:]+): string' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): string/type \1: str/g' Note: [-r for gsed (GNU sed) is needed for group matching](http://superuser.com/a/336819/125262). 
--- .../google-cloud-core/google/cloud/client.py | 10 +++--- .../google/cloud/connection.py | 32 +++++++++---------- .../google/cloud/credentials.py | 4 +-- .../google/cloud/exceptions.py | 4 +-- .../google/cloud/streaming/exceptions.py | 4 +-- .../google/cloud/streaming/transfer.py | 16 +++++----- .../google/cloud/streaming/util.py | 2 +- 7 files changed, 36 insertions(+), 36 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index ea079e14fd6e..6495911185c4 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -34,7 +34,7 @@ class _ClientFactoryMixin(object): def from_service_account_json(cls, json_credentials_path, *args, **kwargs): """Factory to retrieve JSON credentials while creating client. - :type json_credentials_path: string + :type json_credentials_path: str :param json_credentials_path: The path to a private key file (this file was given to you when you created the service account). This file must contain @@ -69,10 +69,10 @@ def from_service_account_p12(cls, client_email, private_key_path, Unless you have an explicit reason to use a PKCS12 key for your service account, we recommend using a JSON key. - :type client_email: string + :type client_email: str :param client_email: The e-mail attached to the service account. - :type private_key_path: string + :type private_key_path: str :param private_key_path: The path to a private key file (this file was given to you when you created the service account). This file must be in P12 format. @@ -127,7 +127,7 @@ def __init__(self, credentials=None, http=None): class _ClientProjectMixin(object): """Mixin to allow setting the project on the client. - :type project: string + :type project: str :param project: the project which the client acts on behalf of. If not passed falls back to the default inferred from the environment. @@ -160,7 +160,7 @@ class JSONClient(Client, _ClientProjectMixin): Assumes such APIs use the ``project`` and the client needs to store this value. - :type project: string + :type project: str :param project: the project which the client acts on behalf of. If not passed falls back to the default inferred from the environment. diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index be0f64059b36..02f6b8f24f6b 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -162,7 +162,7 @@ def build_api_url(cls, path, query_params=None, Typically, you shouldn't need to use this method. - :type path: string + :type path: str :param path: The path to the resource (ie, ``'/b/bucket-name'``). :type query_params: dict or list @@ -170,11 +170,11 @@ def build_api_url(cls, path, query_params=None, key-value pairs) to insert into the query string of the URL. - :type api_base_url: string + :type api_base_url: str :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. - :type api_version: string + :type api_version: str :param api_version: The version of the API to call. Typically you shouldn't provide this and instead use the default for the library. @@ -199,16 +199,16 @@ def _make_request(self, method, url, data=None, content_type=None, Typically, you shouldn't need to use this method. - :type method: string + :type method: str :param method: The HTTP method to use in the request. 
- :type url: string + :type url: str :param url: The URL to send the request to. - :type data: string + :type data: str :param data: The data to send as the body of the request. - :type content_type: string + :type content_type: str :param content_type: The proper MIME type of the data provided. :type headers: dict @@ -249,16 +249,16 @@ def _do_request(self, method, url, headers, data, Allows batch context managers to override and defer a request. - :type method: string + :type method: str :param method: The HTTP method to use in the request. - :type url: string + :type url: str :param url: The URL to send the request to. :type headers: dict :param headers: A dictionary of HTTP headers to send with the request. - :type data: string + :type data: str :param data: The data to send as the body of the request. :type target_object: object or :class:`NoneType` @@ -282,11 +282,11 @@ def api_request(self, method, path, query_params=None, interact with the API using these primitives, this is the correct one to use. - :type method: string + :type method: str :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). Required. - :type path: string + :type path: str :param path: The path to the resource (ie, ``'/b/bucket-name'``). Required. @@ -295,23 +295,23 @@ def api_request(self, method, path, query_params=None, key-value pairs) to insert into the query string of the URL. - :type data: string + :type data: str :param data: The data to send as the body of the request. Default is the empty string. - :type content_type: string + :type content_type: str :param content_type: The proper MIME type of the data provided. Default is None. :type headers: dict :param headers: extra HTTP headers to be sent with the request. - :type api_base_url: string + :type api_base_url: str :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. Default is the standard API base URL. - :type api_version: string + :type api_version: str :param api_version: The version of the API to call. Typically you shouldn't provide this and instead use the default for the library. Default is the diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index 6f57a4c2481a..8b1bc8d14bf3 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -97,7 +97,7 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): :type expiration: int or long :param expiration: When the signed URL should expire. - :type string_to_sign: string + :type string_to_sign: str :param string_to_sign: The string to be signed by the credentials. :raises AttributeError: If :meth: sign_blob is unavailable. @@ -184,7 +184,7 @@ def generate_signed_url(credentials, resource, expiration, :param credentials: Credentials object with an associated private key to sign text. - :type resource: string + :type resource: str :param resource: A pointer to a specific resource (typically, ``/bucket-name/path/to/blob.txt``). diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index 6a4b8706917a..52f705ea44fa 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -184,10 +184,10 @@ def make_exception(response, content, error_info=None, use_json=True): :param response: A response object that defines a status code as the status attribute. 
- :type content: string or dictionary + :type content: str or dictionary :param content: The body of the HTTP error response. - :type error_info: string + :type error_info: str :param error_info: Optional string giving extra information about the failed request. diff --git a/packages/google-cloud-core/google/cloud/streaming/exceptions.py b/packages/google-cloud-core/google/cloud/streaming/exceptions.py index 0a7e4b94815e..c5cdf6ef0106 100644 --- a/packages/google-cloud-core/google/cloud/streaming/exceptions.py +++ b/packages/google-cloud-core/google/cloud/streaming/exceptions.py @@ -32,7 +32,7 @@ class HttpError(CommunicationError): :type content: bytes :param content: payload of the response which returned the error - :type url: string + :type url: str :param url: URL of the response which returned the error """ def __init__(self, response, content, url): @@ -94,7 +94,7 @@ class RetryAfterError(HttpError): :type content: bytes :param content: payload of the response which returned the error. - :type url: string + :type url: str :param url: URL of the response which returned the error. :type retry_after: integer diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index e27fcd1bfe5a..0aa196d95dde 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -178,7 +178,7 @@ def _initialize(self, http, url): :type http: :class:`httplib2.Http` (or a worklike) or None. :param http: the Http instance to use to make requests. - :type url: string + :type url: str :param url: The url for this transfer. """ self._ensure_uninitialized() @@ -250,7 +250,7 @@ def __init__(self, stream, **kwds): def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): """Create a new download object from a filename. - :type filename: string + :type filename: str :param filename: path/filename for the target file :type overwrite: boolean @@ -622,7 +622,7 @@ class Upload(_Transfer): :type stream: file-like object :param stream: stream to/from which data is downloaded/uploaded. - :type mime_type: string: + :type mime_type: str: :param mime_type: MIME type of the upload. :type total_size: integer or None @@ -663,10 +663,10 @@ def __init__(self, stream, mime_type, total_size=None, http=None, def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): """Create a new Upload object from a filename. - :type filename: string + :type filename: str :param filename: path/filename to the file being uploaded - :type mime_type: string + :type mime_type: str :param mime_type: MIMEtype of the file being uploaded :type auto_transfer: boolean or None @@ -697,7 +697,7 @@ def from_stream(cls, stream, mime_type, :type stream: writable file-like object :param stream: the target file - :type mime_type: string + :type mime_type: str :param mime_type: MIMEtype of the file being uploaded :type total_size: integer or None @@ -759,7 +759,7 @@ def strategy(self): def strategy(self, value): """Update upload strategy to use - :type value: string (one of :data:`SIMPLE_UPLOAD` or + :type value: str (one of :data:`SIMPLE_UPLOAD` or :data:`RESUMABLE_UPLOAD`) :raises: :exc:`ValueError` if value is not one of the two allowed @@ -1034,7 +1034,7 @@ def initialize_upload(self, http_request, http): def _last_byte(range_header): """Parse the last byte from a 'Range' header. 
- :type range_header: string + :type range_header: str :param range_header: 'Range' header value per RFC 2616/7233 :rtype: int diff --git a/packages/google-cloud-core/google/cloud/streaming/util.py b/packages/google-cloud-core/google/cloud/streaming/util.py index c5d1e5b082f3..abe92ac9638a 100644 --- a/packages/google-cloud-core/google/cloud/streaming/util.py +++ b/packages/google-cloud-core/google/cloud/streaming/util.py @@ -50,7 +50,7 @@ def acceptable_mime_type(accept_patterns, mime_type): :type accept_patterns: list of string :param accept_patterns: acceptable MIME types. - :type mime_type: string + :type mime_type: str :param mime_type: the MIME being checked :rtype: boolean From acb4af5d04343161693b0a6100f08663c8767e1f Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:02:02 -0700 Subject: [PATCH 040/468] Replace types boolean with bool. Uses the command: ag -l 'type ([^:]+): boolean' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): boolean/type \1: bool/g' --- .../google/cloud/_helpers.py | 2 +- .../google/cloud/streaming/transfer.py | 26 +++++++++---------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 3d0a58916737..1e5fd3ab9b68 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -438,7 +438,7 @@ def _datetime_to_rfc3339(value, ignore_zone=True): :type value: :class:`datetime.datetime` :param value: The datetime object to be converted to a string. - :type ignore_zone: boolean + :type ignore_zone: bool :param ignore_zone: If True, then the timezone (if any) of the datetime object is ignored. diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index 0aa196d95dde..608ebf26afdc 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -52,13 +52,13 @@ class _Transfer(object): :type stream: file-like object :param stream: stream to/from which data is downloaded/uploaded. - :type close_stream: boolean + :type close_stream: bool :param close_stream: should this instance close the stream when deleted :type chunksize: integer :param chunksize: the size of chunks used to download/upload a file. - :type auto_transfer: boolean + :type auto_transfer: bool :param auto_transfer: should this instance automatically begin transfering data when initialized @@ -253,10 +253,10 @@ def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): :type filename: str :param filename: path/filename for the target file - :type overwrite: boolean + :type overwrite: bool :param overwrite: should an existing file be overwritten - :type auto_transfer: boolean + :type auto_transfer: bool :param auto_transfer: should the transfer be started immediately :type kwds: dict @@ -283,7 +283,7 @@ def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): :type total_size: integer or None :param total_size: total size of the file to be downloaded - :type auto_transfer: boolean + :type auto_transfer: bool :param auto_transfer: should the transfer be started immediately :type kwds: dict @@ -460,7 +460,7 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): :type end: integer or None :param end: suggested last byte of the range. 
- :type use_chunks: boolean + :type use_chunks: bool :param use_chunks: If False, ignore :attr:`chunksize`. :rtype: str @@ -558,7 +558,7 @@ def get_range(self, start, end=None, use_chunks=True): :type end: integer or ``None`` :param end: Where to stop fetching bytes. (See above.) - :type use_chunks: boolean + :type use_chunks: bool :param use_chunks: If False, ignore :attr:`chunksize` and fetch this range in a single request. If True, streams via chunks. @@ -594,7 +594,7 @@ def stream_file(self, use_chunks=True): Writes retrieved bytes into :attr:`stream`. - :type use_chunks: boolean + :type use_chunks: bool :param use_chunks: If False, ignore :attr:`chunksize` and stream this download in a single request. If True, streams via chunks. @@ -631,10 +631,10 @@ class Upload(_Transfer): :type http: :class:`httplib2.Http` (or workalike) :param http: Http instance used to perform requests. - :type close_stream: boolean + :type close_stream: bool :param close_stream: should this instance close the stream when deleted - :type auto_transfer: boolean + :type auto_transfer: bool :param auto_transfer: should this instance automatically begin transfering data when initialized @@ -669,7 +669,7 @@ def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): :type mime_type: str :param mime_type: MIMEtype of the file being uploaded - :type auto_transfer: boolean or None + :type auto_transfer: bool or None :param auto_transfer: should the transfer be started immediately :type kwds: dict @@ -703,7 +703,7 @@ def from_stream(cls, stream, mime_type, :type total_size: integer or None :param total_size: Size of the file being uploaded - :type auto_transfer: boolean or None + :type auto_transfer: bool or None :param auto_transfer: should the transfer be started immediately :type kwds: dict @@ -1065,7 +1065,7 @@ def _validate_chunksize(self, chunksize=None): def stream_file(self, use_chunks=True): """Upload the stream. - :type use_chunks: boolean + :type use_chunks: bool :param use_chunks: If False, send the stream in a single request. Otherwise, send it in chunks. From 59811e510a68cfa5892c1436ca170ec471659629 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:04:23 -0700 Subject: [PATCH 041/468] Replace rtypes boolean with bool. Uses the command: ag -l 'rtype: boolean' | xargs sed -i .bak 's/rtype: boolean/rtype: bool/g' --- packages/google-cloud-core/google/cloud/iterator.py | 2 +- .../google/cloud/streaming/buffered_stream.py | 2 +- .../google/cloud/streaming/http_wrapper.py | 2 +- .../google-cloud-core/google/cloud/streaming/transfer.py | 6 +++--- packages/google-cloud-core/google/cloud/streaming/util.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 2deb9ebbe703..ac1323a35e19 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -345,7 +345,7 @@ def next(self): def _has_next_page(self): """Determines whether or not there are more pages with results. - :rtype: boolean + :rtype: bool :returns: Whether the iterator has more pages. 
""" if self.page_number == 0: diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py index b9a3d2ff84d1..544933101d49 100644 --- a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py +++ b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py @@ -55,7 +55,7 @@ def __len__(self): def stream_exhausted(self): """Does the stream have bytes remaining beyond the buffer - :rtype: boolean + :rtype: bool :returns: Boolean indicating if the stream is exhausted. """ return self._stream_at_end diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py index 398c9f2f572f..fe4eb61c25d2 100644 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -258,7 +258,7 @@ def retry_after(self): def is_redirect(self): """Does this response contain a redirect - :rtype: boolean + :rtype: bool :returns: True if the status code indicates a redirect and the 'location' header is present. """ diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index 608ebf26afdc..3a3ad3b244e8 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -93,7 +93,7 @@ def __repr__(self): def close_stream(self): """Should this instance close the stream when deleted. - :rtype: boolean + :rtype: bool :returns: Boolean indicated if the stream should be closed. """ return self._close_stream @@ -190,7 +190,7 @@ def _initialize(self, http, url): def initialized(self): """Has the instance been initialized - :rtype: boolean + :rtype: bool :returns: Boolean indicating if the current transfer has been initialized. """ @@ -723,7 +723,7 @@ def from_stream(cls, stream, mime_type, def complete(self): """Has the entire stream been uploaded. - :rtype: boolean + :rtype: bool :returns: Boolean indicated if the upload is complete. """ return self._complete diff --git a/packages/google-cloud-core/google/cloud/streaming/util.py b/packages/google-cloud-core/google/cloud/streaming/util.py index abe92ac9638a..ad25fedbd0e3 100644 --- a/packages/google-cloud-core/google/cloud/streaming/util.py +++ b/packages/google-cloud-core/google/cloud/streaming/util.py @@ -53,7 +53,7 @@ def acceptable_mime_type(accept_patterns, mime_type): :type mime_type: str :param mime_type: the MIME being checked - :rtype: boolean + :rtype: bool :returns: True if the supplied MIME type matches at least one of the patterns, else False. """ From b48f8e590a2a351fcb99f850fe8423e0562277d9 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:20:20 -0700 Subject: [PATCH 042/468] Replace integer with int in types. 
Uses the command: ag -l 'type ([^:]+): integer' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): integer/type \1: int/g' --- .../google/cloud/streaming/buffered_stream.py | 6 +-- .../google/cloud/streaming/exceptions.py | 2 +- .../google/cloud/streaming/http_wrapper.py | 8 ++-- .../google/cloud/streaming/stream_slice.py | 4 +- .../google/cloud/streaming/transfer.py | 42 +++++++++---------- .../google/cloud/streaming/util.py | 2 +- 6 files changed, 32 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py index b9a3d2ff84d1..ed1f168b631d 100644 --- a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py +++ b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py @@ -24,10 +24,10 @@ class BufferedStream(object): :type stream: readable file-like object :param stream: the stream to be buffered - :type start: integer + :type start: int :param start: the starting point in the stream - :type size: integer + :type size: int :param size: the size of the buffer """ def __init__(self, stream, start, size): @@ -81,7 +81,7 @@ def _bytes_remaining(self): def read(self, size=None): """Read bytes from the buffer. - :type size: integer or None + :type size: int or None :param size: How many bytes to read (defaults to all remaining bytes). :rtype: str diff --git a/packages/google-cloud-core/google/cloud/streaming/exceptions.py b/packages/google-cloud-core/google/cloud/streaming/exceptions.py index 0a7e4b94815e..5b28dd50b014 100644 --- a/packages/google-cloud-core/google/cloud/streaming/exceptions.py +++ b/packages/google-cloud-core/google/cloud/streaming/exceptions.py @@ -97,7 +97,7 @@ class RetryAfterError(HttpError): :type url: string :param url: URL of the response which returned the error. - :type retry_after: integer + :type retry_after: int :param retry_after: seconds to wait before retrying. """ def __init__(self, response, content, url, retry_after): diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py index 398c9f2f572f..deb79f53c920 100644 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -77,7 +77,7 @@ def _httplib2_debug_level(http_request, level, http=None): :type http_request: :class:`Request` :param http_request: the request to be logged. - :type level: integer + :type level: int :param level: the debuglevel for logging. :type http: :class:`httplib2.Http`, or ``None`` @@ -319,7 +319,7 @@ def _make_api_request_no_retry(http, http_request, redirections=_REDIRECTIONS): :type http_request: :class:`Request` :param http_request: the request to send. - :type redirections: integer + :type redirections: int :param redirections: Number of redirects to follow. :rtype: :class:`Response` @@ -363,11 +363,11 @@ def make_api_request(http, http_request, retries=7, :type http_request: :class:`Request` :param http_request: the request to send. - :type retries: integer + :type retries: int :param retries: Number of retries to attempt on retryable responses (such as 429 or 5XX). - :type redirections: integer + :type redirections: int :param redirections: Number of redirects to follow. 
:rtype: :class:`Response` diff --git a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py index dc2c3229d6f9..6bc1ab98a938 100644 --- a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py +++ b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py @@ -23,7 +23,7 @@ class StreamSlice(object): :type stream: readable file-like object :param stream: the stream to be buffered. - :type max_bytes: integer + :type max_bytes: int :param max_bytes: maximum number of bytes to return in the slice. """ def __init__(self, stream, max_bytes): @@ -65,7 +65,7 @@ def read(self, size=None): slice indicates we should still be able to read more bytes, we raise :exc:`IncompleteRead`. - :type size: integer or None + :type size: int or None :param size: If provided, read no more than size bytes from the stream. :rtype: bytes diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index 196a388f3237..8349198cf0be 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -55,7 +55,7 @@ class _Transfer(object): :type close_stream: boolean :param close_stream: should this instance close the stream when deleted - :type chunksize: integer + :type chunksize: int :param chunksize: the size of chunks used to download/upload a file. :type auto_transfer: boolean @@ -65,7 +65,7 @@ class _Transfer(object): :type http: :class:`httplib2.Http` (or workalike) :param http: Http instance used to perform requests. - :type num_retries: integer + :type num_retries: int :param num_retries: how many retries should the transfer attempt """ @@ -140,7 +140,7 @@ def num_retries(self): def num_retries(self, value): """Update how many retries should the transfer attempt - :type value: integer + :type value: int """ if not isinstance(value, six.integer_types): raise ValueError("num_retries: pass an integer") @@ -280,7 +280,7 @@ def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): :type stream: writable file-like object :param stream: the target file - :type total_size: integer or None + :type total_size: int or None :param total_size: total size of the file to be downloaded :type auto_transfer: boolean @@ -392,11 +392,11 @@ def initialize_download(self, http_request, http): def _normalize_start_end(self, start, end=None): """Validate / fix up byte range. - :type start: integer + :type start: int :param start: start byte of the range: if negative, used as an offset from the end. - :type end: integer + :type end: int :param end: end byte of the range. :rtype: tuple, (start, end) @@ -428,11 +428,11 @@ def _set_range_header(request, start, end=None): :type request: :class:`google.cloud.streaming.http_wrapper.Request` :param request: the request to update - :type start: integer + :type start: int :param start: start byte of the range: if negative, used as an offset from the end. - :type end: integer + :type end: int :param end: end byte of the range. """ if start < 0: @@ -454,10 +454,10 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): - if we have no information about size, and don't want to use the chunksize, we'll return None. - :type start: integer + :type start: int :param start: start byte of the range. - :type end: integer or None + :type end: int or None :param end: suggested last byte of the range. 
:type use_chunks: boolean @@ -490,10 +490,10 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): def _get_chunk(self, start, end): """Retrieve a chunk of the file. - :type start: integer + :type start: int :param start: start byte of the range. - :type end: integer or None + :type end: int or None :param end: end byte of the range. :rtype: :class:`google.cloud.streaming.http_wrapper.Response` @@ -552,10 +552,10 @@ def get_range(self, start, end=None, use_chunks=True): (These variations correspond to those described in the HTTP 1.1 protocol for range headers in RFC 2616, sec. 14.35.1.) - :type start: integer + :type start: int :param start: Where to start fetching bytes. (See above.) - :type end: integer or ``None`` + :type end: int or ``None`` :param end: Where to stop fetching bytes. (See above.) :type use_chunks: boolean @@ -625,7 +625,7 @@ class Upload(_Transfer): :type mime_type: string: :param mime_type: MIME type of the upload. - :type total_size: integer or None + :type total_size: int or None :param total_size: Total upload size for the stream. :type http: :class:`httplib2.Http` (or workalike) @@ -700,7 +700,7 @@ def from_stream(cls, stream, mime_type, :type mime_type: string :param mime_type: MIMEtype of the file being uploaded - :type total_size: integer or None + :type total_size: int or None :param total_size: Size of the file being uploaded :type auto_transfer: boolean or None @@ -784,7 +784,7 @@ def total_size(self): def total_size(self, value): """Update total size of the stream to be uploaded. - :type value: integer or None + :type value: int or None :param value: the size """ self._ensure_uninitialized() @@ -1048,7 +1048,7 @@ def _validate_chunksize(self, chunksize=None): Helper for :meth:`stream_file`. - :type chunksize: integer or None + :type chunksize: int or None :param chunksize: the chunk size to be tested. :raises: :exc:`ValueError` if ``chunksize`` is not a multiple @@ -1112,7 +1112,7 @@ def _send_media_request(self, request, end): :type request: :class:`google.cloud.streaming.http_wrapper.Request` :param request: the request to upload - :type end: integer + :type end: int :param end: end byte of the to be uploaded :rtype: :class:`google.cloud.streaming.http_wrapper.Response` @@ -1140,7 +1140,7 @@ def _send_media_body(self, start): Helper for :meth:`stream_file`: - :type start: integer + :type start: int :param start: start byte of the range. :rtype: :class:`google.cloud.streaming.http_wrapper.Response` @@ -1170,7 +1170,7 @@ def _send_chunk(self, start): Helper for :meth:`stream_file`: - :type start: integer + :type start: int :param start: start byte of the range. :rtype: :class:`google.cloud.streaming.http_wrapper.Response` diff --git a/packages/google-cloud-core/google/cloud/streaming/util.py b/packages/google-cloud-core/google/cloud/streaming/util.py index c5d1e5b082f3..7e161f26f8eb 100644 --- a/packages/google-cloud-core/google/cloud/streaming/util.py +++ b/packages/google-cloud-core/google/cloud/streaming/util.py @@ -27,7 +27,7 @@ def calculate_wait_for_retry(retry_attempt): random amount of jitter is added to spread out retry attempts from different clients. - :type retry_attempt: integer + :type retry_attempt: int :param retry_attempt: Retry attempt counter. :rtype: integer From d6afe85c5a0bd49b773c63ce0e9e105d326e9bcf Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:24:00 -0700 Subject: [PATCH 043/468] Replaces integer with int in rtypes. 
Uses the command: ag -l 'rtype: integer' | xargs sed -i .bak 's/rtype: integer/rtype: int/g' --- .../google/cloud/streaming/buffered_stream.py | 4 ++-- .../google/cloud/streaming/exceptions.py | 2 +- .../google/cloud/streaming/http_wrapper.py | 8 ++++---- .../google/cloud/streaming/stream_slice.py | 2 +- .../google/cloud/streaming/transfer.py | 10 +++++----- .../google-cloud-core/google/cloud/streaming/util.py | 2 +- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py index ed1f168b631d..2bfe35522c95 100644 --- a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py +++ b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py @@ -64,7 +64,7 @@ def stream_exhausted(self): def stream_end_position(self): """Point to which stream was read into the buffer - :rtype: integer + :rtype: int :returns: The end-position of the stream. """ return self._end_pos @@ -73,7 +73,7 @@ def stream_end_position(self): def _bytes_remaining(self): """Bytes remaining to be read from the buffer - :rtype: integer + :rtype: int :returns: The number of bytes remaining. """ return len(self._buffered_data) - self._buffer_pos diff --git a/packages/google-cloud-core/google/cloud/streaming/exceptions.py b/packages/google-cloud-core/google/cloud/streaming/exceptions.py index 5b28dd50b014..4c5dea915099 100644 --- a/packages/google-cloud-core/google/cloud/streaming/exceptions.py +++ b/packages/google-cloud-core/google/cloud/streaming/exceptions.py @@ -50,7 +50,7 @@ def __str__(self): def status_code(self): """Status code for the response. - :rtype: integer + :rtype: int :returns: the code """ return int(self.response['status']) diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py index deb79f53c920..87bc43b2accd 100644 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -191,7 +191,7 @@ def _process_content_range(content_range): :type content_range: str :param content_range: the header value being parsed. - :rtype: integer + :rtype: int :returns: the length of the response chunk. """ _, _, range_spec = content_range.partition(' ') @@ -221,7 +221,7 @@ def length(self): Exposed as an attribute since using ``len()`` directly can fail for responses larger than ``sys.maxint``. - :rtype: integer or long + :rtype: int or long :returns: The length of the response. """ if 'content-encoding' in self.info and 'content-range' in self.info: @@ -239,7 +239,7 @@ def length(self): def status_code(self): """HTTP status code - :rtype: integer + :rtype: int :returns: The response status code. """ return int(self.info['status']) @@ -248,7 +248,7 @@ def status_code(self): def retry_after(self): """Retry interval (if set). - :rtype: integer + :rtype: int :returns: interval in seconds """ if 'retry-after' in self.info: diff --git a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py index 6bc1ab98a938..bb37a4446ad5 100644 --- a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py +++ b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py @@ -51,7 +51,7 @@ def length(self): For 32-bit python2.x, len() cannot exceed a 32-bit number. 
- :rtype: integer + :rtype: int :returns: The max "length" of the stream. """ return self._max_bytes diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index 8349198cf0be..0fd7a8c31371 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -131,7 +131,7 @@ def bytes_http(self, value): def num_retries(self): """How many retries should the transfer attempt - :rtype: integer + :rtype: int :returns: The number of retries allowed. """ return self._num_retries @@ -300,7 +300,7 @@ def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): def progress(self): """Number of bytes have been downloaded. - :rtype: integer >= 0 + :rtype: int >= 0 :returns: The number of downloaded bytes. """ return self._progress @@ -309,7 +309,7 @@ def progress(self): def total_size(self): """Total number of bytes to be downloaded. - :rtype: integer or None + :rtype: int or None :returns: The total number of bytes to download. """ return self._total_size @@ -741,7 +741,7 @@ def mime_type(self): def progress(self): """Bytes uploaded so far - :rtype: integer + :rtype: int :returns: The amount uploaded so far. """ return self._progress @@ -775,7 +775,7 @@ def strategy(self, value): def total_size(self): """Total size of the stream to be uploaded. - :rtype: integer or None + :rtype: int or None :returns: The total size to be uploaded. """ return self._total_size diff --git a/packages/google-cloud-core/google/cloud/streaming/util.py b/packages/google-cloud-core/google/cloud/streaming/util.py index 7e161f26f8eb..e99784345373 100644 --- a/packages/google-cloud-core/google/cloud/streaming/util.py +++ b/packages/google-cloud-core/google/cloud/streaming/util.py @@ -30,7 +30,7 @@ def calculate_wait_for_retry(retry_attempt): :type retry_attempt: int :param retry_attempt: Retry attempt counter. - :rtype: integer + :rtype: int :returns: Number of seconds to wait before retrying request. """ wait_time = 2 ** retry_attempt From 3f58a4364fa1a16ba10040ac9a580f3d248dad30 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Oct 2016 17:04:00 -0700 Subject: [PATCH 044/468] Replace :: with `.. code-block:: console`. Towards #2404. --- packages/google-cloud-core/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index 28b8431795be..8115c049c89c 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -12,6 +12,6 @@ used by all of the ``google-cloud-*``. Quick Start ----------- -:: +.. code-block:: console $ pip install --upgrade google-cloud-core From 9ad4564ac4cf7a711e98e0c28147bb3e5fbb823d Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 09:51:22 -0700 Subject: [PATCH 045/468] Remove None from param types and add (Optional). This runs a script to remove None from the types for parameters, and added (Optional) to the description. Does not pass lint due to some too-long lines. I will clean those up manually. 
See: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2580#pullrequestreview-5178193 --- .../google/cloud/_helpers.py | 8 ++-- .../google/cloud/connection.py | 12 +++--- .../google/cloud/streaming/buffered_stream.py | 4 +- .../google/cloud/streaming/http_wrapper.py | 8 ++-- .../google/cloud/streaming/stream_slice.py | 4 +- .../google/cloud/streaming/transfer.py | 40 +++++++++---------- 6 files changed, 38 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 1e5fd3ab9b68..449961126a69 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -366,8 +366,8 @@ def _microseconds_from_datetime(value): def _millis_from_datetime(value): """Convert non-none datetime to timestamp, assuming UTC. - :type value: :class:`datetime.datetime`, or None - :param value: the timestamp + :type value: :class:`datetime.datetime` + :param value: (Optional) the timestamp :rtype: int, or ``NoneType`` :returns: the timestamp, in milliseconds, or None @@ -554,8 +554,8 @@ def _name_from_project_path(path, project, template): :type path: str :param path: URI path containing the name. - :type project: str or NoneType - :param project: The project associated with the request. It is + :type project: str + :param project: (Optional) The project associated with the request. It is included for validation purposes. If passed as None, disables validation. diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index 02f6b8f24f6b..a5eb2a7a6dd3 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -214,8 +214,8 @@ def _make_request(self, method, url, data=None, content_type=None, :type headers: dict :param headers: A dictionary of HTTP headers to send with the request. - :type target_object: object or :class:`NoneType` - :param target_object: Argument to be used by library callers. + :type target_object: object + :param target_object: (Optional) Argument to be used by library callers. This can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. @@ -261,8 +261,8 @@ def _do_request(self, method, url, headers, data, :type data: str :param data: The data to send as the body of the request. - :type target_object: object or :class:`NoneType` - :param target_object: Unused ``target_object`` here but may be used + :type target_object: object + :param target_object: (Optional) Unused ``target_object`` here but may be used by a superclass. :rtype: tuple of ``response`` (a dictionary of sorts) @@ -323,8 +323,8 @@ def api_request(self, method, path, query_params=None, response as JSON and raise an exception if that cannot be done. Default is True. - :type _target_object: :class:`object` or :class:`NoneType` - :param _target_object: Protected argument to be used by library + :type _target_object: :class:`object` + :param _target_object: (Optional) Protected argument to be used by library callers. This can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. 
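The script that performed the rewrite described in this commit message is not included above. A minimal sketch of the kind of transformation it describes, assuming simple single-line ":type:" / ":param:" fields and a hypothetical helper name mark_optional (not part of the patch), could look like this:

import re

# Hypothetical sketch, not part of the patch: drop "or None" / "or ``None``"
# from a ":type:" line and mark the matching ":param:" description as
# "(Optional)", mirroring the rewrite described in the commit message above.
_TYPE_RE = re.compile(r'(:type [^:]+: .*?),? or (?:``None``|None)\s*$')
_PARAM_RE = re.compile(r'(:param [^:]+: )(?!\(Optional\) )')

def mark_optional(type_line, param_line):
    """Rewrite a ":type:" / ":param:" pair for a value that may be None."""
    new_type_line = _TYPE_RE.sub(r'\1', type_line)
    if new_type_line != type_line:
        # Only annotate the description when the type actually allowed None.
        param_line = _PARAM_RE.sub(r'\1(Optional) ', param_line, count=1)
    return new_type_line, param_line

For example, mark_optional(':type end: int or None', ':param end: end byte of the range.') returns (':type end: int', ':param end: (Optional) end byte of the range.'), which matches the kind of change shown in the hunks that follow.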
diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py index 748aa2068052..83061ade78a6 100644 --- a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py +++ b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py @@ -81,8 +81,8 @@ def _bytes_remaining(self): def read(self, size=None): """Read bytes from the buffer. - :type size: int or None - :param size: How many bytes to read (defaults to all remaining bytes). + :type size: int + :param size: (Optional) How many bytes to read (defaults to all remaining bytes). :rtype: str :returns: The data read from the stream. diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py index 898d1b09029e..d6e58a4f4bf7 100644 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -80,8 +80,8 @@ def _httplib2_debug_level(http_request, level, http=None): :type level: int :param level: the debuglevel for logging. - :type http: :class:`httplib2.Http`, or ``None`` - :param http: the instance on whose connections to set the debuglevel. + :type http: :class:`httplib2.Http` + :param http: (Optional) the instance on whose connections to set the debuglevel. """ if http_request.loggable_body is None: yield @@ -115,8 +115,8 @@ class Request(object): :type http_method: str :param http_method: the HTTP method to use for the request - :type headers: mapping or None - :param headers: headers to be sent with the request + :type headers: mapping + :param headers: (Optional) headers to be sent with the request :type body: str :param body: body to be sent with the request diff --git a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py index bb37a4446ad5..c33cda011738 100644 --- a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py +++ b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py @@ -65,8 +65,8 @@ def read(self, size=None): slice indicates we should still be able to read more bytes, we raise :exc:`IncompleteRead`. - :type size: int or None - :param size: If provided, read no more than size bytes from the stream. + :type size: int + :param size: (Optional) If provided, read no more than size bytes from the stream. :rtype: bytes :returns: bytes read from this slice. diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index b24f41144d93..b17f63392815 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -280,8 +280,8 @@ def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): :type stream: writable file-like object :param stream: the target file - :type total_size: int or None - :param total_size: total size of the file to be downloaded + :type total_size: int + :param total_size: (Optional) total size of the file to be downloaded :type auto_transfer: bool :param auto_transfer: should the transfer be started immediately @@ -457,8 +457,8 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): :type start: int :param start: start byte of the range. - :type end: int or None - :param end: suggested last byte of the range. 
+ :type end: int + :param end: (Optional) suggested last byte of the range. :type use_chunks: bool :param use_chunks: If False, ignore :attr:`chunksize`. @@ -493,8 +493,8 @@ def _get_chunk(self, start, end): :type start: int :param start: start byte of the range. - :type end: int or None - :param end: end byte of the range. + :type end: int + :param end: (Optional) end byte of the range. :rtype: :class:`google.cloud.streaming.http_wrapper.Response` :returns: response from the chunk request. @@ -555,8 +555,8 @@ def get_range(self, start, end=None, use_chunks=True): :type start: int :param start: Where to start fetching bytes. (See above.) - :type end: int or ``None`` - :param end: Where to stop fetching bytes. (See above.) + :type end: int + :param end: (Optional) Where to stop fetching bytes. (See above.) :type use_chunks: bool :param use_chunks: If False, ignore :attr:`chunksize` @@ -625,8 +625,8 @@ class Upload(_Transfer): :type mime_type: str: :param mime_type: MIME type of the upload. - :type total_size: int or None - :param total_size: Total upload size for the stream. + :type total_size: int + :param total_size: (Optional) Total upload size for the stream. :type http: :class:`httplib2.Http` (or workalike) :param http: Http instance used to perform requests. @@ -669,8 +669,8 @@ def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): :type mime_type: str :param mime_type: MIMEtype of the file being uploaded - :type auto_transfer: bool or None - :param auto_transfer: should the transfer be started immediately + :type auto_transfer: bool + :param auto_transfer: (Optional) should the transfer be started immediately :type kwds: dict :param kwds: keyword arguments: passed @@ -700,11 +700,11 @@ def from_stream(cls, stream, mime_type, :type mime_type: str :param mime_type: MIMEtype of the file being uploaded - :type total_size: int or None - :param total_size: Size of the file being uploaded + :type total_size: int + :param total_size: (Optional) Size of the file being uploaded - :type auto_transfer: bool or None - :param auto_transfer: should the transfer be started immediately + :type auto_transfer: bool + :param auto_transfer: (Optional) should the transfer be started immediately :type kwds: dict :param kwds: keyword arguments: passed @@ -784,8 +784,8 @@ def total_size(self): def total_size(self, value): """Update total size of the stream to be uploaded. - :type value: int or None - :param value: the size + :type value: int + :param value: (Optional) the size """ self._ensure_uninitialized() self._total_size = value @@ -1048,8 +1048,8 @@ def _validate_chunksize(self, chunksize=None): Helper for :meth:`stream_file`. - :type chunksize: int or None - :param chunksize: the chunk size to be tested. + :type chunksize: int + :param chunksize: (Optional) the chunk size to be tested. :raises: :exc:`ValueError` if ``chunksize`` is not a multiple of the server-specified granulariy. From bb7966489a7a060ca5d9786fca198d167d647e78 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 11:21:42 -0700 Subject: [PATCH 046/468] Fix lint errors caused by addition of (Optional). Mostly, lines that were too long. 
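The net effect on a parameter, sketched here with an invented ``timeout`` parameter rather than any real one:

    Before:
        :type timeout: int or None
        :param timeout: Seconds to wait before giving up.

    After (long descriptions now wrap onto their own indented line):
        :type timeout: int
        :param timeout: (Optional) Seconds to wait before giving up.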
--- .../google/cloud/connection.py | 21 ++++++++++--------- .../google/cloud/streaming/buffered_stream.py | 4 +++- .../google/cloud/streaming/http_wrapper.py | 3 ++- .../google/cloud/streaming/stream_slice.py | 4 +++- .../google/cloud/streaming/transfer.py | 6 ++++-- 5 files changed, 23 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/connection.py index a5eb2a7a6dd3..03e11449c558 100644 --- a/packages/google-cloud-core/google/cloud/connection.py +++ b/packages/google-cloud-core/google/cloud/connection.py @@ -215,10 +215,10 @@ def _make_request(self, method, url, data=None, content_type=None, :param headers: A dictionary of HTTP headers to send with the request. :type target_object: object - :param target_object: (Optional) Argument to be used by library callers. - This can allow custom behavior, for example, to - defer an HTTP request and complete initialization - of the object at a later time. + :param target_object: + (Optional) Argument to be used by library callers. This can allow + custom behavior, for example, to defer an HTTP request and complete + initialization of the object at a later time. :rtype: tuple of ``response`` (a dictionary of sorts) and ``content`` (a string). @@ -262,8 +262,9 @@ def _do_request(self, method, url, headers, data, :param data: The data to send as the body of the request. :type target_object: object - :param target_object: (Optional) Unused ``target_object`` here but may be used - by a superclass. + :param target_object: + (Optional) Unused ``target_object`` here but may be used by a + superclass. :rtype: tuple of ``response`` (a dictionary of sorts) and ``content`` (a string). @@ -324,10 +325,10 @@ def api_request(self, method, path, query_params=None, that cannot be done. Default is True. :type _target_object: :class:`object` - :param _target_object: (Optional) Protected argument to be used by library - callers. This can allow custom behavior, for - example, to defer an HTTP request and complete - initialization of the object at a later time. + :param _target_object: + (Optional) Protected argument to be used by library callers. This + can allow custom behavior, for example, to defer an HTTP request + and complete initialization of the object at a later time. :raises: Exception if the response code is not 200 OK. :rtype: dict or str diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py index 83061ade78a6..24a52176cb66 100644 --- a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py +++ b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py @@ -82,7 +82,9 @@ def read(self, size=None): """Read bytes from the buffer. :type size: int - :param size: (Optional) How many bytes to read (defaults to all remaining bytes). + :param size: + (Optional) How many bytes to read (defaults to all remaining + bytes). :rtype: str :returns: The data read from the stream. diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py index d6e58a4f4bf7..3f8d8355645d 100644 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -81,7 +81,8 @@ def _httplib2_debug_level(http_request, level, http=None): :param level: the debuglevel for logging. 
:type http: :class:`httplib2.Http` - :param http: (Optional) the instance on whose connections to set the debuglevel. + :param http: + (Optional) the instance on whose connections to set the debuglevel. """ if http_request.loggable_body is None: yield diff --git a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py index c33cda011738..3a13337bb993 100644 --- a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py +++ b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py @@ -66,7 +66,9 @@ def read(self, size=None): raise :exc:`IncompleteRead`. :type size: int - :param size: (Optional) If provided, read no more than size bytes from the stream. + :param size: + (Optional) If provided, read no more than size bytes from the + stream. :rtype: bytes :returns: bytes read from this slice. diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index b17f63392815..410aa9430bae 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -670,7 +670,8 @@ def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): :param mime_type: MIMEtype of the file being uploaded :type auto_transfer: bool - :param auto_transfer: (Optional) should the transfer be started immediately + :param auto_transfer: + (Optional) should the transfer be started immediately :type kwds: dict :param kwds: keyword arguments: passed @@ -704,7 +705,8 @@ def from_stream(cls, stream, mime_type, :param total_size: (Optional) Size of the file being uploaded :type auto_transfer: bool - :param auto_transfer: (Optional) should the transfer be started immediately + :param auto_transfer: + (Optional) should the transfer be started immediately :type kwds: dict :param kwds: keyword arguments: passed From 0ebcf0b01542cfad2f693d37b89fc100ce914722 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 22:31:07 -0700 Subject: [PATCH 047/468] Removing Iterator.reset(). This is in advance of supporting GAX page iterators within our Iterator class. Iterator.reset() isn't a feature that a typical Python user expects to exist, and restarting an iterator can be as easy as: >>> my_iter = client.list_foo(*args, **kwargs) >>> consume_it(my_iter) >>> restarted_iter = client.list_foo(*args, **kwargs) --- .../google/cloud/iterator.py | 20 ++++++----- .../unit_tests/test_iterator.py | 36 ++++++++++--------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index ac1323a35e19..06242e57597d 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -246,10 +246,11 @@ def __init__(self, client, path, item_to_value, items_key=DEFAULT_ITEMS_KEY, page_token=None, max_results=None, extra_params=None, page_start=_do_nothing_page_start): + self._started = False self.client = client self.path = path - self._items_key = items_key self._item_to_value = item_to_value + self._items_key = items_key self.max_results = max_results self.extra_params = extra_params self._page_start = page_start @@ -288,7 +289,15 @@ def page(self): return self._page def __iter__(self): - """The :class:`Iterator` is an iterator.""" + """The :class:`Iterator` is an iterator. 
+ + :rtype: :class:`Iterator` + :returns: Current instance. + :raises ValueError: If the iterator has already been started. + """ + if self._started: + raise ValueError('Iterator has already started', self) + self._started = True return self def update_page(self, require_empty=True): @@ -385,10 +394,3 @@ def _get_next_page_response(self): self.next_page_token = response.get('nextPageToken') return response - - def reset(self): - """Resets the iterator to the beginning.""" - self.page_number = 0 - self.next_page_token = None - self.num_results = 0 - self._page = _UNSET diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 81a3738ffc1f..e16edaaa00fa 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -101,14 +101,26 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_constructor(self): + from google.cloud.iterator import _do_nothing_page_start + from google.cloud.iterator import _UNSET + connection = _Connection() client = _Client(connection) path = '/foo' iterator = self._makeOne(client, path, None) + self.assertFalse(iterator._started) self.assertIs(iterator.client, client) self.assertEqual(iterator.path, path) + self.assertIsNone(iterator._item_to_value) + self.assertEqual(iterator._items_key, 'items') + self.assertIsNone(iterator.max_results) + self.assertEqual(iterator.extra_params, {}) + self.assertIs(iterator._page_start, _do_nothing_page_start) + # Changing attributes. self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) + self.assertEqual(iterator.num_results, 0) + self.assertIs(iterator._page, _UNSET) def test_constructor_w_extra_param_collision(self): connection = _Connection() @@ -193,6 +205,13 @@ def test___iter__(self): iterator = self._makeOne(None, None, None) self.assertIs(iter(iterator), iterator) + def test___iter___started(self): + iterator = self._makeOne(None, None, None) + iter_obj = iter(iterator) + self.assertIs(iter_obj, iterator) + with self.assertRaises(ValueError): + iter(iterator) + def test_iterate(self): import six @@ -336,23 +355,6 @@ def test__get_next_page_response_new_no_token_in_response(self): self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) - def test_reset(self): - from google.cloud.iterator import _UNSET - - connection = _Connection() - client = _Client(connection) - path = '/foo' - token = 'token' - iterator = self._makeOne(client, path, None) - iterator.page_number = 1 - iterator.next_page_token = token - iterator._page = object() - iterator.reset() - self.assertEqual(iterator.page_number, 0) - self.assertEqual(iterator.num_results, 0) - self.assertIsNone(iterator.next_page_token) - self.assertIs(iterator._page, _UNSET) - class _Connection(object): From a9e4ec02d924cc0984a9af8c0848f764a47ca37c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 22:43:48 -0700 Subject: [PATCH 048/468] Adding a pages iterator side-by-side with items iterator. 
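Roughly, the intended usage of the new property, with the same hypothetical ``client.list_foo()`` as in the earlier commit message (``do_something`` is likewise a placeholder):

    >>> iterator = client.list_foo(*args, **kwargs)
    >>> for page in iterator.pages:
    ...     # Each page corresponds to a single API request.
    ...     for item in page:
    ...         do_something(item)

Either ``iterator.pages`` or plain iteration over ``iterator`` may be consumed, but only one of them, and only once per iterator instance.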
--- .../google/cloud/iterator.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 06242e57597d..715e280ac54a 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -274,6 +274,31 @@ def _verify_params(self): raise ValueError('Using a reserved parameter', reserved_in_use) + def _pages_iter(self): + """Generator of pages of API responses. + + Yields :class:`Page` instances. + """ + while self._has_next_page(): + response = self._get_next_page_response() + page = Page(self, response, self._items_key, + self._item_to_value) + self._page_start(self, page, response) + yield page + + @property + def pages(self): + """Iterator of pages in the response. + + :rtype: :class:`~types.GeneratorType` + :returns: A generator of :class:`Page` instances. + :raises ValueError: If the iterator has already been started. + """ + if self._started: + raise ValueError('Iterator has already started', self) + self._started = True + return self._pages_iter() + @property def page(self): """The current page of results that has been retrieved. From 20a5951cbeed3f331fef9ccfa797b000b03d9ba6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 22:47:12 -0700 Subject: [PATCH 049/468] Using the pages iterator in the main iterator. NOTE: There is a current mismatch between incrementing Iterator.num_results in Iterator.__iter__ vs. incrementing it in Iterator.pages (there it won't be incremented, so this will need to be addressed in a subsequent commit). --- .../google/cloud/iterator.py | 29 +++++-------------- 1 file changed, 7 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 715e280ac54a..1cc0f7a0842d 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -314,16 +314,13 @@ def page(self): return self._page def __iter__(self): - """The :class:`Iterator` is an iterator. - - :rtype: :class:`Iterator` - :returns: Current instance. - :raises ValueError: If the iterator has already been started. - """ - if self._started: - raise ValueError('Iterator has already started', self) - self._started = True - return self + """Iterator for each item returned.""" + # NOTE: We don't check if the iterator has started since the pages + # iterator already does this. + for page in self.pages: + for item in page: + self.num_results += 1 + yield item def update_page(self, require_empty=True): """Move to the next page in the result set. @@ -364,18 +361,6 @@ def update_page(self, require_empty=True): msg = _PAGE_ERR_TEMPLATE % (self._page, self.page.remaining) raise ValueError(msg) - def next(self): - """Get the next item from the request.""" - self.update_page(require_empty=False) - if self.page is None: - raise StopIteration - item = six.next(self.page) - self.num_results += 1 - return item - - # Alias needed for Python 2/3 support. - __next__ = next - def _has_next_page(self): """Determines whether or not there are more pages with results. From 327bca407da750ef6f0929f48e97d7ca8d3b0cd4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 22:49:35 -0700 Subject: [PATCH 050/468] Remove some manual paging functionality. 
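The removed ``update_page()`` / ``page`` dance can be written against the pages generator instead; a minimal sketch, with construction of ``iterator`` elided:

    >>> pages = iterator.pages       # the generator may only be started once
    >>> first_page = next(pages)
    >>> items = list(first_page)
    >>> first_page.remaining
    0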
--- .../google/cloud/iterator.py | 55 ------------------- 1 file changed, 55 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 1cc0f7a0842d..98a85d613586 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -107,7 +107,6 @@ import six -_UNSET = object() _NO_MORE_PAGES_ERR = 'Iterator has no more pages.' _UNSTARTED_ERR = ( 'Iterator has not been started. Either begin iterating, ' @@ -261,7 +260,6 @@ def __init__(self, client, path, item_to_value, self.page_number = 0 self.next_page_token = page_token self.num_results = 0 - self._page = _UNSET def _verify_params(self): """Verifies the parameters don't use any reserved parameter. @@ -299,20 +297,6 @@ def pages(self): self._started = True return self._pages_iter() - @property - def page(self): - """The current page of results that has been retrieved. - - If there are no more results, will return :data:`None`. - - :rtype: :class:`Page` - :returns: The page of items that has been retrieved. - :raises AttributeError: If the page has not been set. - """ - if self._page is _UNSET: - raise AttributeError(_UNSTARTED_ERR) - return self._page - def __iter__(self): """Iterator for each item returned.""" # NOTE: We don't check if the iterator has started since the pages @@ -322,45 +306,6 @@ def __iter__(self): self.num_results += 1 yield item - def update_page(self, require_empty=True): - """Move to the next page in the result set. - - If the current page is not empty and ``require_empty`` is :data:`True` - then an exception will be raised. If the current page is not empty - and ``require_empty`` is :data:`False`, then this will return - without updating the current page. - - If the current page **is** empty, but there are no more results, - sets the current page to :data:`None`. - - If there are no more pages, throws an exception. - - :type require_empty: bool - :param require_empty: (Optional) Flag to indicate if the current page - must be empty before updating. - - :raises ValueError: If ``require_empty`` is :data:`True` but the - current page is not empty. - :raises ValueError: If there are no more pages. - """ - if self._page is None: - raise ValueError(_NO_MORE_PAGES_ERR) - - # NOTE: This assumes Page.remaining can never go below 0. - page_empty = self._page is _UNSET or self._page.remaining == 0 - if page_empty: - if self._has_next_page(): - response = self._get_next_page_response() - self._page = Page(self, response, self._items_key, - self._item_to_value) - self._page_start(self, self._page, response) - else: - self._page = None - else: - if require_empty: - msg = _PAGE_ERR_TEMPLATE % (self._page, self.page.remaining) - raise ValueError(msg) - def _has_next_page(self): """Determines whether or not there are more pages with results. From 8a54340181b53b77615b8d7bd8ddc6ed6e3edaf0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 22:51:45 -0700 Subject: [PATCH 051/468] Properly tracking the Iterator #results in pages/items iterators. 
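With this fix the running total comes out the same whichever way the results are consumed; a sketch assuming a hypothetical listing of three items in total:

    >>> iterator = client.list_foo(*args, **kwargs)
    >>> sum(page.num_items for page in iterator.pages)
    3
    >>> iterator.num_results
    3
    >>> iterator = client.list_foo(*args, **kwargs)
    >>> len(list(iterator))
    3
    >>> iterator.num_results
    3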
--- packages/google-cloud-core/google/cloud/iterator.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 98a85d613586..1e575a999544 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -282,6 +282,7 @@ def _pages_iter(self): page = Page(self, response, self._items_key, self._item_to_value) self._page_start(self, page, response) + self.num_results += page.num_items yield page @property @@ -302,6 +303,9 @@ def __iter__(self): # NOTE: We don't check if the iterator has started since the pages # iterator already does this. for page in self.pages: + # Decrement the total results since the pages iterator adds + # to it when each page is encountered. + self.num_results -= page.num_items for item in page: self.num_results += 1 yield item From 572012a399016e2908a0fcd34913ac2c0691b64d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 23:03:05 -0700 Subject: [PATCH 052/468] Updating Iterator/paging documentation. --- .../google/cloud/iterator.py | 76 ++++++------------- 1 file changed, 25 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 1e575a999544..f9c818b56ca9 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -43,64 +43,38 @@ ... break When iterating, not every new item will send a request to the server. -To monitor these requests, track the current page of the iterator:: +To iterate based on each page of items (where a page corresponds to +a request):: >>> iterator = Iterator(...) - >>> iterator.page_number - 0 - >>> next(iterator) - - >>> iterator.page_number - 1 - >>> iterator.page.remaining - 1 - >>> next(iterator) - - >>> iterator.page_number - 1 - >>> iterator.page.remaining - 0 - >>> next(iterator) - - >>> iterator.page_number - 2 - >>> iterator.page.remaining - 19 - -It's also possible to consume an entire page and handle the paging process -manually:: - - >>> iterator = Iterator(...) - >>> # Manually pull down the first page. - >>> iterator.update_page() - >>> items = list(iterator.page) - >>> items + >>> for page in iterator.pages: + ... print('=' * 20) + ... print(' Page number: %d' % (iterator.page_number,)) + ... print(' Items in page: %d' % (page.num_items,)) + ... print(' First item: %r' % (next(page),)) + ... print('Items remaining: %d' % (page.remaining,)) + ... print('Next page token: %s' % (iterator.next_page_token,)) + ==================== + Page number: 1 + Items in page: 1 + First item: + Items remaining: 0 + Next page token: eav1OzQB0OM8rLdGXOEsyQWSG + ==================== + Page number: 2 + Items in page: 19 + First item: + Items remaining: 18 + Next page token: None + +To consume an entire page:: + + >>> list(page) [ , , , ] - >>> iterator.page.remaining - 0 - >>> iterator.page.num_items - 3 - >>> iterator.next_page_token - 'eav1OzQB0OM8rLdGXOEsyQWSG' - >>> - >>> # Ask for the next page to be grabbed. 
- >>> iterator.update_page() - >>> list(iterator.page) - [ - , - , - ] - >>> - >>> # When there are no more results - >>> iterator.next_page_token is None - True - >>> iterator.update_page() - >>> iterator.page is None - True """ From 15c39ea524b1ca291374b133c6d78b305277e1ec Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 23 Oct 2016 23:32:02 -0700 Subject: [PATCH 053/468] Updating core unit tests for Iterator changes. Also moving __iter__ functionality into a helper so that the "started?" check could be done **before** entering the generator. This is because the "self.pages" generator wouldn't be entered until an item was consumed off the items iterator. --- .../google/cloud/iterator.py | 25 ++-- .../unit_tests/test_iterator.py | 107 ++++++++---------- 2 files changed, 63 insertions(+), 69 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index f9c818b56ca9..078593e72539 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -81,13 +81,6 @@ import six -_NO_MORE_PAGES_ERR = 'Iterator has no more pages.' -_UNSTARTED_ERR = ( - 'Iterator has not been started. Either begin iterating, ' - 'call next(my_iter) or call my_iter.update_page().') -_PAGE_ERR_TEMPLATE = ( - 'Tried to update the page while current page (%r) still has %d ' - 'items remaining.') DEFAULT_ITEMS_KEY = 'items' """The dictionary key used to retrieve items from each response.""" @@ -272,11 +265,9 @@ def pages(self): self._started = True return self._pages_iter() - def __iter__(self): + def _items_iter(self): """Iterator for each item returned.""" - # NOTE: We don't check if the iterator has started since the pages - # iterator already does this. - for page in self.pages: + for page in self._pages_iter(): # Decrement the total results since the pages iterator adds # to it when each page is encountered. self.num_results -= page.num_items @@ -284,6 +275,18 @@ def __iter__(self): self.num_results += 1 yield item + def __iter__(self): + """Iterator for each item returned. + + :rtype: :class:`~types.GeneratorType` + :returns: A generator of items from the API. + :raises ValueError: If the iterator has already been started. + """ + if self._started: + raise ValueError('Iterator has already started', self) + self._started = True + return self._items_iter() + def _has_next_page(self): """Determines whether or not there are more pages with results. 
diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index e16edaaa00fa..53613550323b 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -102,7 +102,6 @@ def _makeOne(self, *args, **kw): def test_constructor(self): from google.cloud.iterator import _do_nothing_page_start - from google.cloud.iterator import _UNSET connection = _Connection() client = _Client(connection) @@ -120,7 +119,6 @@ def test_constructor(self): self.assertEqual(iterator.page_number, 0) self.assertIsNone(iterator.next_page_token) self.assertEqual(iterator.num_results, 0) - self.assertIs(iterator._page, _UNSET) def test_constructor_w_extra_param_collision(self): connection = _Connection() @@ -130,61 +128,15 @@ def test_constructor_w_extra_param_collision(self): with self.assertRaises(ValueError): self._makeOne(client, path, None, extra_params=extra_params) - def test_page_property(self): - iterator = self._makeOne(None, None, None) - page = object() - iterator._page = page - self.assertIs(iterator.page, page) - - def test_page_property_unset(self): - from google.cloud.iterator import _UNSET - - iterator = self._makeOne(None, None, None) - self.assertIs(iterator._page, _UNSET) - with self.assertRaises(AttributeError): - getattr(iterator, 'page') - - def test_update_page_no_more(self): - iterator = self._makeOne(None, None, None) - iterator._page = None - with self.assertRaises(ValueError): - iterator.update_page() - - def test_update_page_not_empty_success(self): - from google.cloud.iterator import Page - - iterator = self._makeOne(None, None, None) - page = Page(None, {}, '', None) - iterator._page = page - iterator._page._remaining = 1 - iterator.update_page(require_empty=False) - self.assertIs(iterator._page, page) - - def test_update_page_not_empty_fail(self): - from google.cloud.iterator import Page - - iterator = self._makeOne(None, None, None) - iterator._page = Page(None, {}, '', None) - iterator._page._remaining = 1 - with self.assertRaises(ValueError): - iterator.update_page(require_empty=True) - - def test_update_page_empty_then_no_more(self): - iterator = self._makeOne(None, None, None) - # Fake that there are no more pages. - iterator.page_number = 1 - iterator.next_page_token = None - iterator.update_page() - self.assertIsNone(iterator.page) - - def test_update_page_empty_then_another(self): + def test_pages_iter_empty_then_another(self): + import six from google.cloud._testing import _Monkey from google.cloud import iterator as MUT items_key = 'its-key' iterator = self._makeOne(None, None, None, items_key=items_key) # Fake the next page class. 
- fake_page = object() + fake_page = MUT.Page(None, {}, '', None) page_args = [] def dummy_response(): @@ -195,20 +147,58 @@ def dummy_page_class(*args): return fake_page iterator._get_next_page_response = dummy_response + pages_iter = iterator.pages with _Monkey(MUT, Page=dummy_page_class): - iterator.update_page() - self.assertIs(iterator.page, fake_page) + page = six.next(pages_iter) + self.assertIs(page, fake_page) self.assertEqual( page_args, [(iterator, {}, items_key, iterator._item_to_value)]) + def test_pages_property(self): + import types + + iterator = self._makeOne(None, None, None) + self.assertIsInstance(iterator.pages, types.GeneratorType) + + def test_pages_property_started(self): + import types + + iterator = self._makeOne(None, None, None) + pages_iter = iterator.pages + self.assertIsInstance(pages_iter, types.GeneratorType) + with self.assertRaises(ValueError): + getattr(iterator, 'pages') + + def test_pages_property_items_started(self): + import types + + iterator = self._makeOne(None, None, None) + items_iter = iter(iterator) + self.assertIsInstance(items_iter, types.GeneratorType) + with self.assertRaises(ValueError): + getattr(iterator, 'pages') + def test___iter__(self): + import types + iterator = self._makeOne(None, None, None) - self.assertIs(iter(iterator), iterator) + self.assertIsInstance(iter(iterator), types.GeneratorType) def test___iter___started(self): + import types + iterator = self._makeOne(None, None, None) iter_obj = iter(iterator) - self.assertIs(iter_obj, iterator) + self.assertIsInstance(iter_obj, types.GeneratorType) + with self.assertRaises(ValueError): + iter(iterator) + + def test___iter___pages_started(self): + import types + + iterator = self._makeOne(None, None, None) + pages_iter = iterator.pages + self.assertIsInstance(pages_iter, types.GeneratorType) with self.assertRaises(ValueError): iter(iterator) @@ -231,16 +221,17 @@ def item_to_value(iterator, item): # pylint: disable=unused-argument item_to_value=item_to_value) self.assertEqual(iterator.num_results, 0) - val1 = six.next(iterator) + items_iter = iter(iterator) + val1 = six.next(items_iter) self.assertEqual(val1, item1) self.assertEqual(iterator.num_results, 1) - val2 = six.next(iterator) + val2 = six.next(items_iter) self.assertEqual(val2, item2) self.assertEqual(iterator.num_results, 2) with self.assertRaises(StopIteration): - six.next(iterator) + six.next(items_iter) kw, = connection._requested self.assertEqual(kw['method'], 'GET') From 5f9134c9a0cd7d387159b912566d3060484247cc Mon Sep 17 00:00:00 2001 From: Mike Lissner Date: Tue, 25 Oct 2016 07:19:17 -0700 Subject: [PATCH 054/468] Fixes typo --- packages/google-cloud-core/google/cloud/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 6495911185c4..893cde192910 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -155,7 +155,7 @@ def _determine_default(project): class JSONClient(Client, _ClientProjectMixin): - """Client to for Google JSON-based API. + """Client for Google JSON-based API. Assumes such APIs use the ``project`` and the client needs to store this value. From 94584a76d8171b4796a831fd40e9e3e68e1a2e44 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 23:39:47 -0700 Subject: [PATCH 055/468] Using Iterators for list_topics() in Pub/Sub. 
In the process, had to add custom support for the GAX page iterator in our core Iterator implementation. --- .../google/cloud/_testing.py | 13 +++++++++-- .../google/cloud/iterator.py | 22 +++++++++++++++---- .../unit_tests/test_iterator.py | 16 ++++++++++++++ 3 files changed, 45 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index 6aece7ec652b..0acb433a4292 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -81,5 +81,14 @@ def __init__(self, items, page_token): self.page_token = page_token def next(self): - items, self._items = self._items, None - return items + if self._items is None: + raise StopIteration + else: + items = self._items + self._items = None + return items + + __next__ = next + + def __iter__(self): + return self diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 078593e72539..87a938022a55 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -202,6 +202,14 @@ class Iterator(object): takes the :class:`Iterator` that started the page, the :class:`Page` that was started and the dictionary containing the page response. + + :type page_iter: callable + :param page_iter: (Optional) Callable to produce a pages iterator from the + current iterator. Assumed signature takes the + :class:`Iterator` that started the page. By default uses + the HTTP pages iterator. Meant to provide a custom + way to create pages (potentially with a custom + transport such as gRPC). """ _PAGE_TOKEN = 'pageToken' @@ -211,7 +219,7 @@ class Iterator(object): def __init__(self, client, path, item_to_value, items_key=DEFAULT_ITEMS_KEY, page_token=None, max_results=None, extra_params=None, - page_start=_do_nothing_page_start): + page_start=_do_nothing_page_start, page_iter=None): self._started = False self.client = client self.path = path @@ -220,8 +228,14 @@ def __init__(self, client, path, item_to_value, self.max_results = max_results self.extra_params = extra_params self._page_start = page_start + self._page_iter = None + # Verify inputs / provide defaults. if self.extra_params is None: self.extra_params = {} + if page_iter is None: + self._page_iter = self._default_page_iter() + else: + self._page_iter = page_iter(self) self._verify_params() # The attributes below will change over the life of the iterator. self.page_number = 0 @@ -239,7 +253,7 @@ def _verify_params(self): raise ValueError('Using a reserved parameter', reserved_in_use) - def _pages_iter(self): + def _default_page_iter(self): """Generator of pages of API responses. Yields :class:`Page` instances. @@ -263,11 +277,11 @@ def pages(self): if self._started: raise ValueError('Iterator has already started', self) self._started = True - return self._pages_iter() + return self._page_iter def _items_iter(self): """Iterator for each item returned.""" - for page in self._pages_iter(): + for page in self._page_iter: # Decrement the total results since the pages iterator adds # to it when each page is encountered. 
self.num_results -= page.num_items diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 53613550323b..9a7285cc20d7 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -128,6 +128,22 @@ def test_constructor_w_extra_param_collision(self): with self.assertRaises(ValueError): self._makeOne(client, path, None, extra_params=extra_params) + def test_constructor_non_default_page_iter(self): + connection = _Connection() + client = _Client(connection) + path = '/foo' + result = object() + called = [] + + def page_iter(iterator): + called.append(iterator) + return result + + iterator = self._makeOne(client, path, None, + page_iter=page_iter) + self.assertEqual(called, [iterator]) + self.assertIs(iterator._page_iter, result) + def test_pages_iter_empty_then_another(self): import six from google.cloud._testing import _Monkey From d3482d4fd0213e18b8d79b95add3bc693586bf31 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 11:11:21 -0700 Subject: [PATCH 056/468] Passing client to Pub/Sub API classes. Also tweaking an exhausted _GAXPageIterator in google.cloud._testing. --- packages/google-cloud-core/google/cloud/_testing.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index 0acb433a4292..ddaeb6c8c5a4 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -83,10 +83,8 @@ def __init__(self, items, page_token): def next(self): if self._items is None: raise StopIteration - else: - items = self._items - self._items = None - return items + items, self._items = self._items, None + return items __next__ = next From f5b8c4ee5bdb945ab6af0a62299716ea1f2066e4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 12:44:53 -0700 Subject: [PATCH 057/468] Renaming Iterator to HTTPIterator. --- packages/google-cloud-core/google/cloud/iterator.py | 2 +- packages/google-cloud-core/unit_tests/test_iterator.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 87a938022a55..3cfb5c61ad74 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -166,7 +166,7 @@ def next(self): __next__ = next -class Iterator(object): +class HTTPIterator(object): """A generic class for iterating through Cloud JSON APIs list responses. 
:type client: :class:`~google.cloud.client.Client` diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 9a7285cc20d7..d6c651376dbb 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -91,11 +91,11 @@ def item_to_value(self, item): self.assertEqual(page.remaining, 97) -class TestIterator(unittest.TestCase): +class TestHTTPIterator(unittest.TestCase): def _getTargetClass(self): - from google.cloud.iterator import Iterator - return Iterator + from google.cloud.iterator import HTTPIterator + return HTTPIterator def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) From 3c6616fe735c76af5faa5c221697b5fd7541101a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 14:00:21 -0700 Subject: [PATCH 058/468] Making Iterator base class. For this commit, moving some features of HTTPIterator constructor onto base class. --- .../google/cloud/iterator.py | 39 +++++++++++++------ .../unit_tests/test_iterator.py | 26 +++++++++++++ 2 files changed, 54 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 3cfb5c61ad74..e12f857f8d9c 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -166,15 +166,37 @@ def next(self): __next__ = next -class HTTPIterator(object): +class Iterator(object): + """A generic class for iterating through API list responses. + + :type client: :class:`~google.cloud.client.Client` + :param client: The client used to identify the application. + + :type page_token: str + :param page_token: (Optional) A token identifying a page in a result set. + + :type max_results: int + :param max_results: (Optional) The maximum number of results to fetch. + """ + + def __init__(self, client, page_token=None, max_results=None): + self._started = False + self.client = client + self.max_results = max_results + # The attributes below will change over the life of the iterator. + self.page_number = 0 + self.next_page_token = page_token + self.num_results = 0 + + +class HTTPIterator(Iterator): """A generic class for iterating through Cloud JSON APIs list responses. :type client: :class:`~google.cloud.client.Client` - :param client: The client, which owns a connection to make requests. + :param client: The client used to identify the application. :type path: str - :param path: The path to query for the list of items. Defaults - to :attr:`PATH` on the current iterator class. + :param path: The path to query for the list of items. :type item_to_value: callable :param item_to_value: Callable to convert an item from JSON @@ -220,12 +242,11 @@ def __init__(self, client, path, item_to_value, items_key=DEFAULT_ITEMS_KEY, page_token=None, max_results=None, extra_params=None, page_start=_do_nothing_page_start, page_iter=None): - self._started = False - self.client = client + super(HTTPIterator, self).__init__( + client, page_token=page_token, max_results=max_results) self.path = path self._item_to_value = item_to_value self._items_key = items_key - self.max_results = max_results self.extra_params = extra_params self._page_start = page_start self._page_iter = None @@ -237,10 +258,6 @@ def __init__(self, client, path, item_to_value, else: self._page_iter = page_iter(self) self._verify_params() - # The attributes below will change over the life of the iterator. 
- self.page_number = 0 - self.next_page_token = page_token - self.num_results = 0 def _verify_params(self): """Verifies the parameters don't use any reserved parameter. diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index d6c651376dbb..bd57c6009a7a 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -91,6 +91,32 @@ def item_to_value(self, item): self.assertEqual(page.remaining, 97) +class TestIterator(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.iterator import Iterator + return Iterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_constructor(self): + connection = _Connection() + client = _Client(connection) + token = 'ab13nceor03' + max_results = 1337 + iterator = self._makeOne(client, page_token=token, + max_results=max_results) + + self.assertFalse(iterator._started) + self.assertIs(iterator.client, client) + self.assertEqual(iterator.max_results, max_results) + # Changing attributes. + self.assertEqual(iterator.page_number, 0) + self.assertEqual(iterator.next_page_token, token) + self.assertEqual(iterator.num_results, 0) + + class TestHTTPIterator(unittest.TestCase): def _getTargetClass(self): From 76666351cd56b97720991947035d5ea769273ece Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 14:49:32 -0700 Subject: [PATCH 059/468] Moving pages/_items_iter/__iter__ onto Iterator base class. --- .../google/cloud/iterator.py | 77 +++++----- .../unit_tests/test_iterator.py | 136 +++++++++++------- 2 files changed, 129 insertions(+), 84 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index e12f857f8d9c..dcd3ff7291a0 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -183,11 +183,50 @@ def __init__(self, client, page_token=None, max_results=None): self._started = False self.client = client self.max_results = max_results + # NOTE: The _page_iter is not intended to come through the + # constructor, instead subclasses should over-ride + # this property. + self._page_iter = iter(()) # The attributes below will change over the life of the iterator. self.page_number = 0 self.next_page_token = page_token self.num_results = 0 + @property + def pages(self): + """Iterator of pages in the response. + + :rtype: :class:`~types.GeneratorType` + :returns: A generator of :class:`Page` instances. + :raises ValueError: If the iterator has already been started. + """ + if self._started: + raise ValueError('Iterator has already started', self) + self._started = True + return self._page_iter + + def _items_iter(self): + """Iterator for each item returned.""" + for page in self._page_iter: + # Decrement the total results since the pages iterator adds + # to it when each page is encountered. + self.num_results -= page.num_items + for item in page: + self.num_results += 1 + yield item + + def __iter__(self): + """Iterator for each item returned. + + :rtype: :class:`~types.GeneratorType` + :returns: A generator of items from the API. + :raises ValueError: If the iterator has already been started. 
+ """ + if self._started: + raise ValueError('Iterator has already started', self) + self._started = True + return self._items_iter() + class HTTPIterator(Iterator): """A generic class for iterating through Cloud JSON APIs list responses. @@ -232,6 +271,8 @@ class HTTPIterator(Iterator): the HTTP pages iterator. Meant to provide a custom way to create pages (potentially with a custom transport such as gRPC). + + .. autoattribute:: pages """ _PAGE_TOKEN = 'pageToken' @@ -249,7 +290,6 @@ def __init__(self, client, path, item_to_value, self._items_key = items_key self.extra_params = extra_params self._page_start = page_start - self._page_iter = None # Verify inputs / provide defaults. if self.extra_params is None: self.extra_params = {} @@ -283,41 +323,6 @@ def _default_page_iter(self): self.num_results += page.num_items yield page - @property - def pages(self): - """Iterator of pages in the response. - - :rtype: :class:`~types.GeneratorType` - :returns: A generator of :class:`Page` instances. - :raises ValueError: If the iterator has already been started. - """ - if self._started: - raise ValueError('Iterator has already started', self) - self._started = True - return self._page_iter - - def _items_iter(self): - """Iterator for each item returned.""" - for page in self._page_iter: - # Decrement the total results since the pages iterator adds - # to it when each page is encountered. - self.num_results -= page.num_items - for item in page: - self.num_results += 1 - yield item - - def __iter__(self): - """Iterator for each item returned. - - :rtype: :class:`~types.GeneratorType` - :returns: A generator of items from the API. - :raises ValueError: If the iterator has already been started. - """ - if self._started: - raise ValueError('Iterator has already started', self) - self._started = True - return self._items_iter() - def _has_next_page(self): """Determines whether or not there are more pages with results. diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index bd57c6009a7a..747f739530b2 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -111,11 +111,99 @@ def test_constructor(self): self.assertFalse(iterator._started) self.assertIs(iterator.client, client) self.assertEqual(iterator.max_results, max_results) + self.assertEqual(list(iterator._page_iter), []) # Changing attributes. self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, token) self.assertEqual(iterator.num_results, 0) + def test_pages_property(self): + iterator = self._makeOne(None) + self.assertFalse(iterator._started) + mock_iter = object() + iterator._page_iter = mock_iter + self.assertIs(iterator.pages, mock_iter) + # Check the side-effect. + self.assertTrue(iterator._started) + + def test_pages_property_started(self): + iterator = self._makeOne(None) + self.assertEqual(list(iterator.pages), []) + # Make sure we cannot restart. + with self.assertRaises(ValueError): + getattr(iterator, 'pages') + + def test_pages_property_items_started(self): + iterator = self._makeOne(None) + self.assertEqual(list(iterator), []) + with self.assertRaises(ValueError): + getattr(iterator, 'pages') + + @staticmethod + def _do_nothing(parent, value): + return parent, value + + def test__items_iter(self): + import types + import six + from google.cloud.iterator import Page + + # Items to be returned. 
+ item1 = 17 + item2 = 100 + item3 = 211 + + # Make pages from mock responses + mock_key = 'mock' + parent = object() + page1 = Page(parent, {mock_key: [item1, item2]}, + mock_key, self._do_nothing) + page2 = Page(parent, {mock_key: [item3]}, + mock_key, self._do_nothing) + # Spoof the number of items in each page to offset the + # ``num_results -= page.num_items`` in _items_iter(). + page1._num_items = page2._num_items = 0 + + iterator = self._makeOne(None) + # Fake the page iterator on the object. + iterator._page_iter = iter((page1, page2)) + + items_iter = iterator._items_iter() + # Make sure it is a generator. + self.assertIsInstance(items_iter, types.GeneratorType) + + # Consume items and check the state of the iterator. + self.assertEqual(iterator.num_results, 0) + self.assertEqual(six.next(items_iter), (parent, item1)) + self.assertEqual(iterator.num_results, 1) + self.assertEqual(six.next(items_iter), (parent, item2)) + self.assertEqual(iterator.num_results, 2) + self.assertEqual(six.next(items_iter), (parent, item3)) + self.assertEqual(iterator.num_results, 3) + with self.assertRaises(StopIteration): + six.next(items_iter) + + def test___iter__(self): + iterator = self._makeOne(None) + self.assertFalse(iterator._started) + mock_iter = object() + iterator._page_iter = mock_iter + self.assertIs(iterator.pages, mock_iter) + # Check the side-effect. + self.assertTrue(iterator._started) + + def test___iter___started(self): + iterator = self._makeOne(None) + self.assertEqual(list(iterator), []) + with self.assertRaises(ValueError): + iter(iterator) + + def test___iter___pages_started(self): + iterator = self._makeOne(None) + self.assertEqual(list(iterator.pages), []) + with self.assertRaises(ValueError): + iter(iterator) + class TestHTTPIterator(unittest.TestCase): @@ -196,54 +284,6 @@ def dummy_page_class(*args): self.assertEqual( page_args, [(iterator, {}, items_key, iterator._item_to_value)]) - def test_pages_property(self): - import types - - iterator = self._makeOne(None, None, None) - self.assertIsInstance(iterator.pages, types.GeneratorType) - - def test_pages_property_started(self): - import types - - iterator = self._makeOne(None, None, None) - pages_iter = iterator.pages - self.assertIsInstance(pages_iter, types.GeneratorType) - with self.assertRaises(ValueError): - getattr(iterator, 'pages') - - def test_pages_property_items_started(self): - import types - - iterator = self._makeOne(None, None, None) - items_iter = iter(iterator) - self.assertIsInstance(items_iter, types.GeneratorType) - with self.assertRaises(ValueError): - getattr(iterator, 'pages') - - def test___iter__(self): - import types - - iterator = self._makeOne(None, None, None) - self.assertIsInstance(iter(iterator), types.GeneratorType) - - def test___iter___started(self): - import types - - iterator = self._makeOne(None, None, None) - iter_obj = iter(iterator) - self.assertIsInstance(iter_obj, types.GeneratorType) - with self.assertRaises(ValueError): - iter(iterator) - - def test___iter___pages_started(self): - import types - - iterator = self._makeOne(None, None, None) - pages_iter = iterator.pages - self.assertIsInstance(pages_iter, types.GeneratorType) - with self.assertRaises(ValueError): - iter(iterator) - def test_iterate(self): import six From 879276254eccefb7a89dd88255a2e78a451df044 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 15:08:50 -0700 Subject: [PATCH 060/468] Adding instance flag to determine if page/items are iterating. 
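The gating idea, reduced to a toy class that is emphatically not the real implementation (pages are plain lists here so the sketch stays self-contained):

    class _TallySketch(object):
        """Toy model of the ``_page_increment`` gate."""

        def __init__(self, pages):
            self._pages = pages              # e.g. [[1, 2], [3]]
            self._page_increment = False
            self.num_results = 0

        def _page_gen(self):
            for page in self._pages:
                if self._page_increment:     # bump per-page only in "pages" mode
                    self.num_results += len(page)
                yield page

        @property
        def pages(self):
            self._page_increment = True
            return self._page_gen()

        def __iter__(self):
            for page in self._page_gen():    # flag stays False: bump per item
                for item in page:
                    self.num_results += 1
                    yield item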
--- packages/google-cloud-core/google/cloud/iterator.py | 12 ++++++++---- .../google-cloud-core/unit_tests/test_iterator.py | 4 +--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index dcd3ff7291a0..ebef82bc410c 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -187,6 +187,11 @@ def __init__(self, client, page_token=None, max_results=None): # constructor, instead subclasses should over-ride # this property. self._page_iter = iter(()) + # NOTE: This flag indicates if the total number of results should be + # incremented. This is useful since a page iterator will + # want to increment by results per page while an items + # iterator will want to increment per item. + self._page_increment = False # The attributes below will change over the life of the iterator. self.page_number = 0 self.next_page_token = page_token @@ -203,14 +208,12 @@ def pages(self): if self._started: raise ValueError('Iterator has already started', self) self._started = True + self._page_increment = True return self._page_iter def _items_iter(self): """Iterator for each item returned.""" for page in self._page_iter: - # Decrement the total results since the pages iterator adds - # to it when each page is encountered. - self.num_results -= page.num_items for item in page: self.num_results += 1 yield item @@ -320,7 +323,8 @@ def _default_page_iter(self): page = Page(self, response, self._items_key, self._item_to_value) self._page_start(self, page, response) - self.num_results += page.num_items + if self._page_increment: + self.num_results += page.num_items yield page def _has_next_page(self): diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 747f739530b2..38f0d1376d20 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -112,6 +112,7 @@ def test_constructor(self): self.assertIs(iterator.client, client) self.assertEqual(iterator.max_results, max_results) self.assertEqual(list(iterator._page_iter), []) + self.assertFalse(iterator._page_increment) # Changing attributes. self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, token) @@ -160,9 +161,6 @@ def test__items_iter(self): mock_key, self._do_nothing) page2 = Page(parent, {mock_key: [item3]}, mock_key, self._do_nothing) - # Spoof the number of items in each page to offset the - # ``num_results -= page.num_items`` in _items_iter(). - page1._num_items = page2._num_items = 0 iterator = self._makeOne(None) # Fake the page iterator on the object. From 1fbc46e2ed8166cc4e47ead80477f975898e5de6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 15:25:14 -0700 Subject: [PATCH 061/468] Adding GAXIterator subclass. 
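A rough sketch of how the subclass is meant to be wired up, where ``gax_page_iter`` stands in for a ``google.gax.PageIterator`` obtained from a GAX API call and ``handle`` is a placeholder:

    >>> from google.cloud.iterator import GAXIterator
    >>> iterator = GAXIterator(client, gax_page_iter)
    >>> for page in iterator.pages:
    ...     for resource_pb in page:
    ...         handle(resource_pb)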
--- .../google/cloud/iterator.py | 25 +++++++++++++++++ .../unit_tests/test_iterator.py | 28 +++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index ebef82bc410c..8649a2753c01 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -370,3 +370,28 @@ def _get_next_page_response(self): self.next_page_token = response.get('nextPageToken') return response + + +class GAXIterator(Iterator): + """A generic class for iterating through Cloud gRPC APIs list responses. + + :type client: :class:`~google.cloud.client.Client` + :param client: The client used to identify the application. + + :type page_iter: :class:`~google.gax.PageIterator` + :param page_iter: A GAX page iterator to be wrapped and conform to the + :class:`~google.cloud.iterator.Iterator` surface. + + :type page_token: str + :param page_token: (Optional) A token identifying a page in a result set. + + :type max_results: int + :param max_results: (Optional) The maximum number of results to fetch. + + .. autoattribute:: pages + """ + + def __init__(self, client, page_iter, page_token=None, max_results=None): + super(GAXIterator, self).__init__( + client, page_token=page_token, max_results=max_results) + self._page_iter = page_iter diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 38f0d1376d20..0fb166af7ff9 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -427,6 +427,34 @@ def test__get_next_page_response_new_no_token_in_response(self): self.assertEqual(kw['query_params'], {}) +class TestGAXIterator(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.iterator import GAXIterator + return GAXIterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_constructor(self): + client = _Client(None) + page_iter = object() + token = 'zzzyy78kl' + max_results = 1337 + iterator = self._makeOne(client, page_iter, page_token=token, + max_results=max_results) + + self.assertFalse(iterator._started) + self.assertIs(iterator.client, client) + self.assertEqual(iterator.max_results, max_results) + self.assertIs(iterator._page_iter, page_iter) + self.assertFalse(iterator._page_increment) + # Changing attributes. + self.assertEqual(iterator.page_number, 0) + self.assertEqual(iterator.next_page_token, token) + self.assertEqual(iterator.num_results, 0) + + class _Connection(object): def __init__(self, *responses): From cf044961c8794bbf0d555834d6f7e666c8b9e418 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 15:37:50 -0700 Subject: [PATCH 062/468] Moving item_to_value into base Iterator class. --- .../google/cloud/iterator.py | 26 +++++++++++++++---- .../unit_tests/test_iterator.py | 24 ++++++++++------- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 8649a2753c01..6efb2d08a514 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -172,6 +172,12 @@ class Iterator(object): :type client: :class:`~google.cloud.client.Client` :param client: The client used to identify the application. 
+ :type item_to_value: callable + :param item_to_value: Callable to convert an item from the type in the + raw API response into the native object. + Assumed signature takes an :class:`Iterator` and a + raw API response with a single item. + :type page_token: str :param page_token: (Optional) A token identifying a page in a result set. @@ -179,9 +185,11 @@ class Iterator(object): :param max_results: (Optional) The maximum number of results to fetch. """ - def __init__(self, client, page_token=None, max_results=None): + def __init__(self, client, item_to_value, + page_token=None, max_results=None): self._started = False self.client = client + self._item_to_value = item_to_value self.max_results = max_results # NOTE: The _page_iter is not intended to come through the # constructor, instead subclasses should over-ride @@ -287,9 +295,9 @@ def __init__(self, client, path, item_to_value, page_token=None, max_results=None, extra_params=None, page_start=_do_nothing_page_start, page_iter=None): super(HTTPIterator, self).__init__( - client, page_token=page_token, max_results=max_results) + client, item_to_value, page_token=page_token, + max_results=max_results) self.path = path - self._item_to_value = item_to_value self._items_key = items_key self.extra_params = extra_params self._page_start = page_start @@ -382,6 +390,12 @@ class GAXIterator(Iterator): :param page_iter: A GAX page iterator to be wrapped and conform to the :class:`~google.cloud.iterator.Iterator` surface. + :type item_to_value: callable + :param item_to_value: Callable to convert an item from a protobuf + into the native object. Assumed signature + takes an :class:`Iterator` and a single item + from the API response as a protobuf. + :type page_token: str :param page_token: (Optional) A token identifying a page in a result set. @@ -391,7 +405,9 @@ class GAXIterator(Iterator): .. 
autoattribute:: pages """ - def __init__(self, client, page_iter, page_token=None, max_results=None): + def __init__(self, client, page_iter, item_to_value, + page_token=None, max_results=None): super(GAXIterator, self).__init__( - client, page_token=page_token, max_results=max_results) + client, item_to_value, page_token=page_token, + max_results=max_results) self._page_iter = page_iter diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 0fb166af7ff9..2a01d8b6df55 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -103,13 +103,15 @@ def _makeOne(self, *args, **kw): def test_constructor(self): connection = _Connection() client = _Client(connection) + item_to_value = object() token = 'ab13nceor03' max_results = 1337 - iterator = self._makeOne(client, page_token=token, + iterator = self._makeOne(client, item_to_value, page_token=token, max_results=max_results) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) + self.assertIs(iterator._item_to_value, item_to_value) self.assertEqual(iterator.max_results, max_results) self.assertEqual(list(iterator._page_iter), []) self.assertFalse(iterator._page_increment) @@ -119,7 +121,7 @@ def test_constructor(self): self.assertEqual(iterator.num_results, 0) def test_pages_property(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertFalse(iterator._started) mock_iter = object() iterator._page_iter = mock_iter @@ -128,14 +130,14 @@ def test_pages_property(self): self.assertTrue(iterator._started) def test_pages_property_started(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertEqual(list(iterator.pages), []) # Make sure we cannot restart. with self.assertRaises(ValueError): getattr(iterator, 'pages') def test_pages_property_items_started(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertEqual(list(iterator), []) with self.assertRaises(ValueError): getattr(iterator, 'pages') @@ -162,7 +164,7 @@ def test__items_iter(self): page2 = Page(parent, {mock_key: [item3]}, mock_key, self._do_nothing) - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) # Fake the page iterator on the object. 
iterator._page_iter = iter((page1, page2)) @@ -182,7 +184,7 @@ def test__items_iter(self): six.next(items_iter) def test___iter__(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertFalse(iterator._started) mock_iter = object() iterator._page_iter = mock_iter @@ -191,13 +193,13 @@ def test___iter__(self): self.assertTrue(iterator._started) def test___iter___started(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertEqual(list(iterator), []) with self.assertRaises(ValueError): iter(iterator) def test___iter___pages_started(self): - iterator = self._makeOne(None) + iterator = self._makeOne(None, None) self.assertEqual(list(iterator.pages), []) with self.assertRaises(ValueError): iter(iterator) @@ -439,13 +441,15 @@ def _makeOne(self, *args, **kw): def test_constructor(self): client = _Client(None) page_iter = object() + item_to_value = object() token = 'zzzyy78kl' max_results = 1337 - iterator = self._makeOne(client, page_iter, page_token=token, - max_results=max_results) + iterator = self._makeOne(client, page_iter, item_to_value, + page_token=token, max_results=max_results) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) + self.assertIs(iterator._item_to_value, item_to_value) self.assertEqual(iterator.max_results, max_results) self.assertIs(iterator._page_iter, page_iter) self.assertFalse(iterator._page_increment) From e63ab16b1c5559f6598a2fd38074ce9dd519c0bd Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:00:10 -0700 Subject: [PATCH 063/468] Removing items key usage from Page helper. This was HTTP/JSON specific and belongs in the HTTP subclass. --- .../google/cloud/iterator.py | 24 +++++++---------- .../unit_tests/test_iterator.py | 27 ++++++++----------- 2 files changed, 21 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 6efb2d08a514..8dcc3eba7e13 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -109,23 +109,19 @@ class Page(object): :type parent: :class:`Iterator` :param parent: The iterator that owns the current page. - :type response: dict - :param response: The JSON API response for a page. - - :type items_key: str - :param items_key: The dictionary key used to retrieve items - from the response. + :type items: iterable + :param items: An iterable (that also defines __len__) of items + from a raw API response. :type item_to_value: callable - :param item_to_value: Callable to convert an item from JSON - into the native object. Assumed signature - takes an :class:`Iterator` and a dictionary - holding a single item. + :param item_to_value: Callable to convert an item from the type in the + raw API response into the native object. + Assumed signature takes an :class:`Iterator` and a + raw API response with a single item. 
""" - def __init__(self, parent, response, items_key, item_to_value): + def __init__(self, parent, items, item_to_value): self._parent = parent - items = response.get(items_key, ()) self._num_items = len(items) self._remaining = self._num_items self._item_iter = iter(items) @@ -328,8 +324,8 @@ def _default_page_iter(self): """ while self._has_next_page(): response = self._get_next_page_response() - page = Page(self, response, self._items_key, - self._item_to_value) + items = response.get(self._items_key, ()) + page = Page(self, items, self._item_to_value) self._page_start(self, page, response) if self._page_increment: self.num_results += page.num_items diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 2a01d8b6df55..e1f1965bc0c6 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -37,27 +37,27 @@ def _makeOne(self, *args, **kw): def test_constructor(self): parent = object() - items_key = 'potatoes' - response = {items_key: (1, 2, 3)} - page = self._makeOne(parent, response, items_key, None) + item_to_value = object() + page = self._makeOne(parent, (1, 2, 3), item_to_value) self.assertIs(page._parent, parent) self.assertEqual(page._num_items, 3) self.assertEqual(page._remaining, 3) + self.assertIs(page._item_to_value, item_to_value) def test_num_items_property(self): - page = self._makeOne(None, {}, '', None) + page = self._makeOne(None, (), None) num_items = 42 page._num_items = num_items self.assertEqual(page.num_items, num_items) def test_remaining_property(self): - page = self._makeOne(None, {}, '', None) + page = self._makeOne(None, (), None) remaining = 1337 page._remaining = remaining self.assertEqual(page.remaining, remaining) def test___iter__(self): - page = self._makeOne(None, {}, '', None) + page = self._makeOne(None, (), None) self.assertIs(iter(page), page) def test_iterator_calls__item_to_value(self): @@ -71,10 +71,8 @@ def item_to_value(self, item): self.calls += 1 return item - items_key = 'turkeys' - response = {items_key: [10, 11, 12]} parent = Parent() - page = self._makeOne(parent, response, items_key, + page = self._makeOne(parent, (10, 11, 12), Parent.item_to_value) page._remaining = 100 @@ -157,12 +155,9 @@ def test__items_iter(self): item3 = 211 # Make pages from mock responses - mock_key = 'mock' parent = object() - page1 = Page(parent, {mock_key: [item1, item2]}, - mock_key, self._do_nothing) - page2 = Page(parent, {mock_key: [item3]}, - mock_key, self._do_nothing) + page1 = Page(parent, (item1, item2), self._do_nothing) + page2 = Page(parent, (item3,), self._do_nothing) iterator = self._makeOne(None, None) # Fake the page iterator on the object. @@ -266,7 +261,7 @@ def test_pages_iter_empty_then_another(self): items_key = 'its-key' iterator = self._makeOne(None, None, None, items_key=items_key) # Fake the next page class. 
- fake_page = MUT.Page(None, {}, '', None) + fake_page = MUT.Page(None, (), None) page_args = [] def dummy_response(): @@ -282,7 +277,7 @@ def dummy_page_class(*args): page = six.next(pages_iter) self.assertIs(page, fake_page) self.assertEqual( - page_args, [(iterator, {}, items_key, iterator._item_to_value)]) + page_args, [(iterator, (), iterator._item_to_value)]) def test_iterate(self): import six From 1c8c78203b88083d4aadeaa4e9e8f6b4fabf4d20 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:43:27 -0700 Subject: [PATCH 064/468] Adding GAXIterator._wrap_gax for wrapping the GAX iterator. Also updating the _GAXPageIterator mock to allow multiple pages. --- .../google/cloud/_testing.py | 12 +-- .../google/cloud/iterator.py | 20 +++- .../unit_tests/test_iterator.py | 91 ++++++++++++++++++- 3 files changed, 114 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index ddaeb6c8c5a4..3c01825fa6f8 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -76,15 +76,13 @@ def _make_grpc_failed_precondition(self): class _GAXPageIterator(object): - def __init__(self, items, page_token): - self._items = items - self.page_token = page_token + def __init__(self, *pages, **kwargs): + self._pages = iter(pages) + self.page_token = kwargs.get('page_token') def next(self): - if self._items is None: - raise StopIteration - items, self._items = self._items, None - return items + import six + return six.next(self._pages) __next__ = next diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 8dcc3eba7e13..00d488bdfd23 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -406,4 +406,22 @@ def __init__(self, client, page_iter, item_to_value, super(GAXIterator, self).__init__( client, item_to_value, page_token=page_token, max_results=max_results) - self._page_iter = page_iter + self._page_iter = self._wrap_gax(page_iter) + + def _wrap_gax(self, page_iter): + """Generator of pages of API responses. + + Wraps each response from the :class:`~google.gax.PageIterator` in a + :class:`Page` instance and captures some state at each page. + + :type page_iter: :class:`~google.gax.PageIterator` + :param page_iter: The GAX page iterator to wrap. + + Yields :class:`Page` instances. 
+ """ + for items in page_iter: + page = Page(self, items, self._item_to_value) + self.next_page_token = page_iter.page_token or None + if self._page_increment: + self.num_results += page.num_items + yield page diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index e1f1965bc0c6..5c636381d088 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -434,6 +434,8 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_constructor(self): + import types + client = _Client(None) page_iter = object() item_to_value = object() @@ -446,13 +448,100 @@ def test_constructor(self): self.assertIs(iterator.client, client) self.assertIs(iterator._item_to_value, item_to_value) self.assertEqual(iterator.max_results, max_results) - self.assertIs(iterator._page_iter, page_iter) + self.assertIsInstance(iterator._page_iter, types.GeneratorType) self.assertFalse(iterator._page_increment) # Changing attributes. self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, token) self.assertEqual(iterator.num_results, 0) + @staticmethod + def _do_nothing(parent, value): + return parent, value + + def _wrap_gax_helper(self, page_increment=None): + import types + from google.cloud._testing import _GAXPageIterator + from google.cloud.iterator import Page + + iterator = self._makeOne(None, (), self._do_nothing) + if page_increment is not None: + iterator._page_increment = page_increment + # Make a mock ``google.gax.PageIterator`` + page_items = (29, 31) # Items for just one page. + page_token = '2sde98ds2s0hh' + page_iter = _GAXPageIterator(page_items, page_token=page_token) + wrapped = iterator._wrap_gax(page_iter) + self.assertIsInstance(wrapped, types.GeneratorType) + + pages = list(wrapped) + # First check the page token. + self.assertEqual(iterator.next_page_token, page_token) + # Then check the pages of results. + self.assertEqual(len(pages), 1) + page = pages[0] + self.assertIsInstance(page, Page) + # _do_nothing will throw the iterator in front. + expected = zip((iterator, iterator), page_items) + self.assertEqual(list(page), list(expected)) + return iterator + + def test__wrap_gax(self): + iterator = self._wrap_gax_helper() + # Make sure no page incrementing happend. + self.assertFalse(iterator._page_increment) + self.assertEqual(iterator.num_results, 0) + + def test__wrap_gax_with_increment(self): + iterator = self._wrap_gax_helper(True) + # Make sure no page incrementing happend. + self.assertTrue(iterator._page_increment) + self.assertEqual(iterator.num_results, 2) + + def test_iterate(self): + import six + from google.cloud._testing import _GAXPageIterator + + item1 = object() + item2 = object() + item3 = object() + token1 = 'smkdme30e2e32r' + token2 = '39cm9csl123dck' + + # Make a mock ``google.gax.PageIterator`` + page1 = (item1,) + page2 = (item2, item3) + page_iter = _GAXPageIterator(page1, page2, page_token=token1) + iterator = self._makeOne(None, page_iter, self._do_nothing) + + self.assertEqual(iterator.num_results, 0) + + items_iter = iter(iterator) + val1 = six.next(items_iter) + self.assertEqual(val1, (iterator, item1)) + self.assertEqual(iterator.num_results, 1) + self.assertEqual(iterator.next_page_token, token1) + + # Before grabbing the next page, hot-swap the token + # on the ``page_iter``. + page_iter.page_token = token2 + + # Grab the next item (which will cause the next page). 
+ val2 = six.next(items_iter) + self.assertEqual(val2, (iterator, item2)) + self.assertEqual(iterator.num_results, 2) + self.assertEqual(iterator.next_page_token, token2) + + # Grab the final item from the final / current page. + val3 = six.next(items_iter) + self.assertEqual(val3, (iterator, item3)) + self.assertEqual(iterator.num_results, 3) + # Make sure the token did not change. + self.assertEqual(iterator.next_page_token, token2) + + with self.assertRaises(StopIteration): + six.next(items_iter) + class _Connection(object): From 58e3eb1b268f77d08ab77958799b77c37b1d465e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:58:44 -0700 Subject: [PATCH 065/468] Removing page_iter over-ride from HTTPIterator. --- .../google-cloud-core/google/cloud/iterator.py | 17 +++-------------- .../unit_tests/test_iterator.py | 16 ---------------- 2 files changed, 3 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 00d488bdfd23..d4c12491ce25 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -271,14 +271,6 @@ class HTTPIterator(Iterator): the :class:`Page` that was started and the dictionary containing the page response. - :type page_iter: callable - :param page_iter: (Optional) Callable to produce a pages iterator from the - current iterator. Assumed signature takes the - :class:`Iterator` that started the page. By default uses - the HTTP pages iterator. Meant to provide a custom - way to create pages (potentially with a custom - transport such as gRPC). - .. autoattribute:: pages """ @@ -289,7 +281,7 @@ class HTTPIterator(Iterator): def __init__(self, client, path, item_to_value, items_key=DEFAULT_ITEMS_KEY, page_token=None, max_results=None, extra_params=None, - page_start=_do_nothing_page_start, page_iter=None): + page_start=_do_nothing_page_start): super(HTTPIterator, self).__init__( client, item_to_value, page_token=page_token, max_results=max_results) @@ -300,10 +292,7 @@ def __init__(self, client, path, item_to_value, # Verify inputs / provide defaults. if self.extra_params is None: self.extra_params = {} - if page_iter is None: - self._page_iter = self._default_page_iter() - else: - self._page_iter = page_iter(self) + self._page_iter = self._http_page_iter() self._verify_params() def _verify_params(self): @@ -317,7 +306,7 @@ def _verify_params(self): raise ValueError('Using a reserved parameter', reserved_in_use) - def _default_page_iter(self): + def _http_page_iter(self): """Generator of pages of API responses. Yields :class:`Page` instances. 
diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 5c636381d088..025af7f76f57 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -237,22 +237,6 @@ def test_constructor_w_extra_param_collision(self): with self.assertRaises(ValueError): self._makeOne(client, path, None, extra_params=extra_params) - def test_constructor_non_default_page_iter(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - result = object() - called = [] - - def page_iter(iterator): - called.append(iterator) - return result - - iterator = self._makeOne(client, path, None, - page_iter=page_iter) - self.assertEqual(called, [iterator]) - self.assertIs(iterator._page_iter, result) - def test_pages_iter_empty_then_another(self): import six from google.cloud._testing import _Monkey From 884b4fe80d168c645b0e9e7a3fffc330b5a2f8ca Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 17:23:20 -0700 Subject: [PATCH 066/468] Removing page_token from GAXIterator constructor. Instead, using the page token directly from the page iterator passed in (this may occasionally be strange to a user, e.g. if the token is INITIAL_PAGE). --- .../google-cloud-core/google/cloud/iterator.py | 8 ++------ .../google-cloud-core/unit_tests/test_iterator.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index d4c12491ce25..ba161192709a 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -381,19 +381,15 @@ class GAXIterator(Iterator): takes an :class:`Iterator` and a single item from the API response as a protobuf. - :type page_token: str - :param page_token: (Optional) A token identifying a page in a result set. - :type max_results: int :param max_results: (Optional) The maximum number of results to fetch. .. 
autoattribute:: pages """ - def __init__(self, client, page_iter, item_to_value, - page_token=None, max_results=None): + def __init__(self, client, page_iter, item_to_value, max_results=None): super(GAXIterator, self).__init__( - client, item_to_value, page_token=page_token, + client, item_to_value, page_token=page_iter.page_token, max_results=max_results) self._page_iter = self._wrap_gax(page_iter) diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 025af7f76f57..bc64f80af5fd 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -421,12 +421,12 @@ def test_constructor(self): import types client = _Client(None) - page_iter = object() - item_to_value = object() token = 'zzzyy78kl' + page_iter = SimpleIter(token) + item_to_value = object() max_results = 1337 iterator = self._makeOne(client, page_iter, item_to_value, - page_token=token, max_results=max_results) + max_results=max_results) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) @@ -448,7 +448,7 @@ def _wrap_gax_helper(self, page_increment=None): from google.cloud._testing import _GAXPageIterator from google.cloud.iterator import Page - iterator = self._makeOne(None, (), self._do_nothing) + iterator = self._makeOne(None, SimpleIter(), self._do_nothing) if page_increment is not None: iterator._page_increment = page_increment # Make a mock ``google.gax.PageIterator`` @@ -543,3 +543,9 @@ class _Client(object): def __init__(self, connection): self.connection = connection + + +class SimpleIter(object): + + def __init__(self, page_token=None): + self.page_token = page_token From 4306a4776d397771849cb4c3e47b6d4a8da0175a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 18:01:10 -0700 Subject: [PATCH 067/468] Adding _next_page() methods in iterator subclasses. This somewhat of a cart-before-the-horse change, but is done this way to make the commit easier to understand, before unifying the two approaches via _next_page(). --- .../google/cloud/iterator.py | 46 ++++++++++++++++--- 1 file changed, 39 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index ba161192709a..b51b8d1924d9 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -306,19 +306,33 @@ def _verify_params(self): raise ValueError('Using a reserved parameter', reserved_in_use) - def _http_page_iter(self): - """Generator of pages of API responses. + def _next_page(self): + """Get the next page in the iterator. - Yields :class:`Page` instances. + :rtype: :class:`Page` + :returns: The next page in the iterator (or :data:`None` if + there are no pages left). """ - while self._has_next_page(): + if self._has_next_page(): response = self._get_next_page_response() items = response.get(self._items_key, ()) page = Page(self, items, self._item_to_value) self._page_start(self, page, response) + return page + else: + return None + + def _http_page_iter(self): + """Generator of pages of API responses. + + Yields :class:`Page` instances. + """ + page = self._next_page() + while page is not None: if self._page_increment: self.num_results += page.num_items yield page + page = self._next_page() def _has_next_page(self): """Determines whether or not there are more pages with results. 
@@ -393,6 +407,24 @@ def __init__(self, client, page_iter, item_to_value, max_results=None): max_results=max_results) self._page_iter = self._wrap_gax(page_iter) + def _next_page(self, page_iter): + """Get the next page in the iterator. + + :type page_iter: :class:`~google.gax.PageIterator` + :param page_iter: The GAX page iterator to consume. + + :rtype: :class:`Page` + :returns: The next page in the iterator (or :data:`None` if + there are no pages left). + """ + try: + items = six.next(page_iter) + page = Page(self, items, self._item_to_value) + self.next_page_token = page_iter.page_token or None + return page + except StopIteration: + return None + def _wrap_gax(self, page_iter): """Generator of pages of API responses. @@ -404,9 +436,9 @@ def _wrap_gax(self, page_iter): Yields :class:`Page` instances. """ - for items in page_iter: - page = Page(self, items, self._item_to_value) - self.next_page_token = page_iter.page_token or None + page = self._next_page(page_iter) + while page is not None: if self._page_increment: self.num_results += page.num_items yield page + page = self._next_page(page_iter) From 8ee4df30821b86ff26ec9c3049ea4c1f11ff81ce Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 18:06:14 -0700 Subject: [PATCH 068/468] Moving Iterator page incrementing into page iterator. Also moving the token setting behavior from HTTPIterator._get_next_page_response() into _next_page(). --- packages/google-cloud-core/google/cloud/iterator.py | 10 ++++------ packages/google-cloud-core/unit_tests/test_iterator.py | 2 -- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index b51b8d1924d9..d1b240a1b467 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -318,6 +318,7 @@ def _next_page(self): items = response.get(self._items_key, ()) page = Page(self, items, self._item_to_value) self._page_start(self, page, response) + self.next_page_token = response.get('nextPageToken') return page else: return None @@ -329,6 +330,7 @@ def _http_page_iter(self): """ page = self._next_page() while page is not None: + self.page_number += 1 if self._page_increment: self.num_results += page.num_items yield page @@ -369,15 +371,10 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. """ - response = self.client.connection.api_request( + return self.client.connection.api_request( method='GET', path=self.path, query_params=self._get_query_params()) - self.page_number += 1 - self.next_page_token = response.get('nextPageToken') - - return response - class GAXIterator(Iterator): """A generic class for iterating through Cloud gRPC APIs list responses. 
@@ -438,6 +435,7 @@ def _wrap_gax(self, page_iter): """ page = self._next_page(page_iter) while page is not None: + self.page_number += 1 if self._page_increment: self.num_results += page.num_items yield page diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index bc64f80af5fd..55423641dce0 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -400,8 +400,6 @@ def test__get_next_page_response_new_no_token_in_response(self): iterator = self._makeOne(client, path, None) response = iterator._get_next_page_response() self.assertEqual(response['items'], [{'name': key1}, {'name': key2}]) - self.assertEqual(iterator.page_number, 1) - self.assertEqual(iterator.next_page_token, token) kw, = connection._requested self.assertEqual(kw['method'], 'GET') self.assertEqual(kw['path'], path) From 5e70c2d0d614af5ebc16d7963d1729cb292a14d7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 18:36:58 -0700 Subject: [PATCH 069/468] Unifying Iterator paging via _next_page(). Also a lint fix for an unimorted member and a unit test fix adding a page token to allow more paging. --- .../google/cloud/iterator.py | 90 ++++++++---------- .../unit_tests/test_iterator.py | 95 +++++++++++-------- 2 files changed, 97 insertions(+), 88 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index d1b240a1b467..78a781188063 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -187,15 +187,6 @@ def __init__(self, client, item_to_value, self.client = client self._item_to_value = item_to_value self.max_results = max_results - # NOTE: The _page_iter is not intended to come through the - # constructor, instead subclasses should over-ride - # this property. - self._page_iter = iter(()) - # NOTE: This flag indicates if the total number of results should be - # incremented. This is useful since a page iterator will - # want to increment by results per page while an items - # iterator will want to increment per item. - self._page_increment = False # The attributes below will change over the life of the iterator. self.page_number = 0 self.next_page_token = page_token @@ -212,12 +203,11 @@ def pages(self): if self._started: raise ValueError('Iterator has already started', self) self._started = True - self._page_increment = True - return self._page_iter + return self._page_iter(increment=True) def _items_iter(self): """Iterator for each item returned.""" - for page in self._page_iter: + for page in self._page_iter(increment=False): for item in page: self.num_results += 1 yield item @@ -234,6 +224,37 @@ def __iter__(self): self._started = True return self._items_iter() + def _page_iter(self, increment): + """Generator of pages of API responses. + + :type increment: bool + :param increment: Flag indicating if the total number of results + should be incremented on each page. This is useful + since a page iterator will want to increment by + results per page while an items iterator will want + to increment per item. + + Yields :class:`Page` instances. + """ + page = self._next_page() + while page is not None: + self.page_number += 1 + if increment: + self.num_results += page.num_items + yield page + page = self._next_page() + + @staticmethod + def _next_page(): + """Get the next page in the iterator. 
+ + This does nothing and is intended to be over-ridden by subclasses + to return the next :class:`Page`. + + :raises NotImplementedError: Always. + """ + raise NotImplementedError + class HTTPIterator(Iterator): """A generic class for iterating through Cloud JSON APIs list responses. @@ -292,7 +313,6 @@ def __init__(self, client, path, item_to_value, # Verify inputs / provide defaults. if self.extra_params is None: self.extra_params = {} - self._page_iter = self._http_page_iter() self._verify_params() def _verify_params(self): @@ -323,19 +343,6 @@ def _next_page(self): else: return None - def _http_page_iter(self): - """Generator of pages of API responses. - - Yields :class:`Page` instances. - """ - page = self._next_page() - while page is not None: - self.page_number += 1 - if self._page_increment: - self.num_results += page.num_items - yield page - page = self._next_page() - def _has_next_page(self): """Determines whether or not there are more pages with results. @@ -402,41 +409,22 @@ def __init__(self, client, page_iter, item_to_value, max_results=None): super(GAXIterator, self).__init__( client, item_to_value, page_token=page_iter.page_token, max_results=max_results) - self._page_iter = self._wrap_gax(page_iter) + self._gax_page_iter = page_iter - def _next_page(self, page_iter): + def _next_page(self): """Get the next page in the iterator. - :type page_iter: :class:`~google.gax.PageIterator` - :param page_iter: The GAX page iterator to consume. + Wraps the response from the :class:`~google.gax.PageIterator` in a + :class:`Page` instance and captures some state at each page. :rtype: :class:`Page` :returns: The next page in the iterator (or :data:`None` if there are no pages left). """ try: - items = six.next(page_iter) + items = six.next(self._gax_page_iter) page = Page(self, items, self._item_to_value) - self.next_page_token = page_iter.page_token or None + self.next_page_token = self._gax_page_iter.page_token or None return page except StopIteration: return None - - def _wrap_gax(self, page_iter): - """Generator of pages of API responses. - - Wraps each response from the :class:`~google.gax.PageIterator` in a - :class:`Page` instance and captures some state at each page. - - :type page_iter: :class:`~google.gax.PageIterator` - :param page_iter: The GAX page iterator to wrap. - - Yields :class:`Page` instances. - """ - page = self._next_page(page_iter) - while page is not None: - self.page_number += 1 - if self._page_increment: - self.num_results += page.num_items - yield page - page = self._next_page(page_iter) diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 55423641dce0..fa54a13be28f 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -111,8 +111,6 @@ def test_constructor(self): self.assertIs(iterator.client, client) self.assertIs(iterator._item_to_value, item_to_value) self.assertEqual(iterator.max_results, max_results) - self.assertEqual(list(iterator._page_iter), []) - self.assertFalse(iterator._page_increment) # Changing attributes. 
self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, token) @@ -122,21 +120,32 @@ def test_pages_property(self): iterator = self._makeOne(None, None) self.assertFalse(iterator._started) mock_iter = object() - iterator._page_iter = mock_iter + incremented = [] + + def page_iter(increment): + incremented.append(increment) + return mock_iter + + iterator._page_iter = page_iter self.assertIs(iterator.pages, mock_iter) + self.assertEqual(incremented, [True]) # Check the side-effect. self.assertTrue(iterator._started) def test_pages_property_started(self): + import types + iterator = self._makeOne(None, None) - self.assertEqual(list(iterator.pages), []) + self.assertIsInstance(iterator.pages, types.GeneratorType) # Make sure we cannot restart. with self.assertRaises(ValueError): getattr(iterator, 'pages') def test_pages_property_items_started(self): + import types + iterator = self._makeOne(None, None) - self.assertEqual(list(iterator), []) + self.assertIsInstance(iter(iterator), types.GeneratorType) with self.assertRaises(ValueError): getattr(iterator, 'pages') @@ -161,8 +170,13 @@ def test__items_iter(self): iterator = self._makeOne(None, None) # Fake the page iterator on the object. - iterator._page_iter = iter((page1, page2)) + incremented = [] + + def page_iter(increment): + incremented.append(increment) + return iter((page1, page2)) + iterator._page_iter = page_iter items_iter = iterator._items_iter() # Make sure it is a generator. self.assertIsInstance(items_iter, types.GeneratorType) @@ -178,27 +192,44 @@ def test__items_iter(self): with self.assertRaises(StopIteration): six.next(items_iter) + # Make sure our page_iter() was called correctly. + self.assertEqual(incremented, [False]) + def test___iter__(self): iterator = self._makeOne(None, None) self.assertFalse(iterator._started) - mock_iter = object() - iterator._page_iter = mock_iter - self.assertIs(iterator.pages, mock_iter) + incremented = [] + + def page_iter(increment): + incremented.append(increment) + return iter(()) + + iterator._page_iter = page_iter + self.assertEqual(list(iterator), []) # Check the side-effect. self.assertTrue(iterator._started) def test___iter___started(self): + import types + iterator = self._makeOne(None, None) - self.assertEqual(list(iterator), []) + self.assertIsInstance(iter(iterator), types.GeneratorType) with self.assertRaises(ValueError): iter(iterator) def test___iter___pages_started(self): + import types + iterator = self._makeOne(None, None) - self.assertEqual(list(iterator.pages), []) + self.assertIsInstance(iterator.pages, types.GeneratorType) with self.assertRaises(ValueError): iter(iterator) + def test__next_page_virtual(self): + iterator = self._makeOne(None, None) + with self.assertRaises(NotImplementedError): + iterator._next_page() + class TestHTTPIterator(unittest.TestCase): @@ -416,8 +447,6 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_constructor(self): - import types - client = _Client(None) token = 'zzzyy78kl' page_iter = SimpleIter(token) @@ -430,8 +459,7 @@ def test_constructor(self): self.assertIs(iterator.client, client) self.assertIs(iterator._item_to_value, item_to_value) self.assertEqual(iterator.max_results, max_results) - self.assertIsInstance(iterator._page_iter, types.GeneratorType) - self.assertFalse(iterator._page_increment) + self.assertIs(iterator._gax_page_iter, page_iter) # Changing attributes. 
self.assertEqual(iterator.page_number, 0) self.assertEqual(iterator.next_page_token, token) @@ -441,44 +469,37 @@ def test_constructor(self): def _do_nothing(parent, value): return parent, value - def _wrap_gax_helper(self, page_increment=None): - import types + def test__next_page(self): from google.cloud._testing import _GAXPageIterator from google.cloud.iterator import Page - iterator = self._makeOne(None, SimpleIter(), self._do_nothing) - if page_increment is not None: - iterator._page_increment = page_increment # Make a mock ``google.gax.PageIterator`` page_items = (29, 31) # Items for just one page. page_token = '2sde98ds2s0hh' page_iter = _GAXPageIterator(page_items, page_token=page_token) - wrapped = iterator._wrap_gax(page_iter) - self.assertIsInstance(wrapped, types.GeneratorType) + # Wrap the GAX iterator. + iterator = self._makeOne(None, page_iter, self._do_nothing) - pages = list(wrapped) + page = iterator._next_page() # First check the page token. self.assertEqual(iterator.next_page_token, page_token) - # Then check the pages of results. - self.assertEqual(len(pages), 1) - page = pages[0] + # Then check the page. self.assertIsInstance(page, Page) # _do_nothing will throw the iterator in front. expected = zip((iterator, iterator), page_items) self.assertEqual(list(page), list(expected)) - return iterator - def test__wrap_gax(self): - iterator = self._wrap_gax_helper() - # Make sure no page incrementing happend. - self.assertFalse(iterator._page_increment) - self.assertEqual(iterator.num_results, 0) + def test__next_page_empty(self): + from google.cloud._testing import _GAXPageIterator - def test__wrap_gax_with_increment(self): - iterator = self._wrap_gax_helper(True) - # Make sure no page incrementing happend. - self.assertTrue(iterator._page_increment) - self.assertEqual(iterator.num_results, 2) + # Make a mock ``google.gax.PageIterator`` + page_iter = _GAXPageIterator() + # Wrap the GAX iterator. + iterator = self._makeOne(None, page_iter, self._do_nothing) + + page = iterator._next_page() + self.assertIsNone(page) + self.assertIsNone(iterator.next_page_token) def test_iterate(self): import six From b3e8eb59c33d0a04f4d5cbc78fb7967fb2cebfaf Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 11:02:22 -0700 Subject: [PATCH 070/468] Using absolute imports in core modules that import grpc. This is because a relative import (in Python 2.7) was causing failures when google.cloud.grpc was side-by-side with google.cloud._helpers, etc. --- packages/google-cloud-core/google/cloud/_helpers.py | 3 +++ packages/google-cloud-core/google/cloud/_testing.py | 4 ++++ packages/google-cloud-core/google/cloud/exceptions.py | 3 +++ 3 files changed, 10 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 449961126a69..b9dc5a83b1be 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -17,6 +17,9 @@ This module is not part of the public API surface. """ +# Avoid the grpc and google.cloud.grpc collision. 
+from __future__ import absolute_import + import calendar import datetime import json diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index 3c01825fa6f8..f03f36ed813b 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -15,6 +15,10 @@ """Shared testing utilities.""" +# Avoid the grpc and google.cloud.grpc collision. +from __future__ import absolute_import + + class _Monkey(object): # context-manager for replacing module names in the scope of a test. diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index 1f5ae818aad5..01bec56d5b27 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -17,6 +17,9 @@ See: https://cloud.google.com/storage/docs/json_api/v1/status-codes """ +# Avoid the grpc and google.cloud.grpc collision. +from __future__ import absolute_import + import copy import json import six From 0d5298803fe8bfdbbcb3aa653de4a03369a35d1f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 21:53:21 -0700 Subject: [PATCH 071/468] Converting Logging client->list_entries to iterator. --- .../google-cloud-core/google/cloud/iterator.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 78a781188063..021fab0c1653 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -298,6 +298,7 @@ class HTTPIterator(Iterator): _PAGE_TOKEN = 'pageToken' _MAX_RESULTS = 'maxResults' _RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS]) + _HTTP_METHOD = 'GET' def __init__(self, client, path, item_to_value, items_key=DEFAULT_ITEMS_KEY, @@ -378,9 +379,19 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. """ - return self.client.connection.api_request( - method='GET', path=self.path, - query_params=self._get_query_params()) + params = self._get_query_params() + if self._HTTP_METHOD == 'GET': + return self.client.connection.api_request( + method=self._HTTP_METHOD, + path=self.path, + query_params=params) + elif self._HTTP_METHOD == 'POST': + return self.client.connection.api_request( + method=self._HTTP_METHOD, + path=self.path, + data=params) + else: + raise ValueError('Unexpected HTTP method', self._HTTP_METHOD) class GAXIterator(Iterator): From 75f7c6823ee6be565b062605bf809521be8dc438 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 22:23:43 -0700 Subject: [PATCH 072/468] Adding unit tests for HTTPIterator GET/POST switching. 
--- .../unit_tests/test_iterator.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index fa54a13be28f..68b2f65aadc0 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -436,6 +436,32 @@ def test__get_next_page_response_new_no_token_in_response(self): self.assertEqual(kw['path'], path) self.assertEqual(kw['query_params'], {}) + def test__get_next_page_response_with_post(self): + path = '/foo' + returned = {'green': 'eggs', 'ham': 55} + connection = _Connection(returned) + client = _Client(connection) + iterator = self._makeOne(client, path, None) + iterator._HTTP_METHOD = 'POST' + response = iterator._get_next_page_response() + self.assertEqual(response, returned) + + self.assertEqual(len(connection._requested), 1) + called_kwargs = connection._requested[0] + self.assertEqual(called_kwargs, { + 'method': iterator._HTTP_METHOD, + 'path': path, + 'data': {}, + }) + + def test__get_next_page_bad_http_method(self): + path = '/foo' + client = _Client(None) + iterator = self._makeOne(client, path, None) + iterator._HTTP_METHOD = 'NOT-A-VERB' + with self.assertRaises(ValueError): + iterator._get_next_page_response() + class TestGAXIterator(unittest.TestCase): From bf875ef1a53763ebe97c5a369111ddc84bfc5222 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Fri, 28 Oct 2016 13:49:55 -0400 Subject: [PATCH 073/468] Actual type_url URI is type.googleapis.com. --- packages/google-cloud-core/google/cloud/operation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 156212794487..f359496ba3cd 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -17,7 +17,7 @@ from google.longrunning import operations_pb2 -_GOOGLE_APIS_PREFIX = 'types.googleapis.com' +_GOOGLE_APIS_PREFIX = 'type.googleapis.com' _TYPE_URL_MAP = { } From 340c3cf2287a926eceb28ffcf7602b9202722efc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 28 Oct 2016 14:47:01 -0700 Subject: [PATCH 074/468] Re-factoring Operation base class. This is in preparation to support JSON/HTTP operations as well and also to ensure that **all** of the operation PB is parsed when polling. --- .../google/cloud/operation.py | 127 ++++++++++---- .../unit_tests/test_operation.py | 158 ++++++++++++++---- 2 files changed, 221 insertions(+), 64 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index f359496ba3cd..839e5e3eaf30 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -39,7 +39,7 @@ def _compute_type_url(klass, prefix=_GOOGLE_APIS_PREFIX): return '%s/%s' % (prefix, name) -def _register_type_url(type_url, klass): +def register_type_url(type_url, klass): """Register a klass as the factory for a given type URL. :type type_url: str @@ -57,55 +57,102 @@ def _register_type_url(type_url, klass): _TYPE_URL_MAP[type_url] = klass +def _from_any(any_pb): + """Convert an ``Any`` protobuf into the actual class. + + Uses the type URL to do the conversion. + + .. note:: + + This assumes that the type URL is already registered. 
+ + :type any_pb: :class:`google.protobuf.any_pb2.Any` + :param any_pb: An any object to be converted. + + :rtype: object + :returns: The instance (of the correct type) stored in the any + instance. + """ + klass = _TYPE_URL_MAP[any_pb.type_url] + return klass.FromString(any_pb.value) + + class Operation(object): """Representation of a Google API Long-Running Operation. + .. _protobuf: https://github.com/googleapis/googleapis/blob/\ + 050400df0fdb16f63b63e9dee53819044bffc857/\ + google/longrunning/operations.proto#L80 + .. _service: https://github.com/googleapis/googleapis/blob/\ + 050400df0fdb16f63b63e9dee53819044bffc857/\ + google/longrunning/operations.proto#L38 + .. _JSON: https://cloud.google.com/speech/reference/rest/\ + v1beta1/operations#Operation + + This wraps an operation `protobuf`_ object and attempts to + interact with the long-running operations `service`_ (specific + to a given API). (Some services also offer a `JSON`_ + API that maps the same underlying data type.) + :type name: str :param name: The fully-qualified path naming the operation. :type client: object: must provide ``_operations_stub`` accessor. :param client: The client used to poll for the status of the operation. - :type pb_metadata: object - :param pb_metadata: Instance of protobuf metadata class - - :type kw: dict - :param kw: caller-assigned metadata about the operation + :type caller_metadata: dict + :param caller_metadata: caller-assigned metadata about the operation """ target = None """Instance assocated with the operations: callers may set.""" - def __init__(self, name, client, pb_metadata=None, **kw): + response = None + """Response returned from completed operation. + + Only one of this and :attr:`error` can be populated. + """ + + error = None + """Error that resulted from a failed (complete) operation. + + Only one of this and :attr:`response` can be populated. + """ + + metadata = None + """Metadata about the current operation (as a protobuf). + + Code that uses operations must register the metadata types (via + :func:`register_type_url`) to ensure that the metadata fields can be + converted into the correct types. + """ + + def __init__(self, name, client, **caller_metadata): self.name = name self.client = client - self.pb_metadata = pb_metadata - self.metadata = kw.copy() + self.caller_metadata = caller_metadata.copy() self._complete = False @classmethod - def from_pb(cls, op_pb, client, **kw): + def from_pb(cls, operation_pb, client, **caller_metadata): """Factory: construct an instance from a protobuf. - :type op_pb: :class:`google.longrunning.operations_pb2.Operation` - :param op_pb: Protobuf to be parsed. + :type operation_pb: + :class:`~google.longrunning.operations_pb2.Operation` + :param operation_pb: Protobuf to be parsed. :type client: object: must provide ``_operations_stub`` accessor. :param client: The client used to poll for the status of the operation. - :type kw: dict - :param kw: caller-assigned metadata about the operation + :type caller_metadata: dict + :param caller_metadata: caller-assigned metadata about the operation :rtype: :class:`Operation` :returns: new instance, with attributes based on the protobuf. 
""" - pb_metadata = None - if op_pb.metadata.type_url: - type_url = op_pb.metadata.type_url - md_klass = _TYPE_URL_MAP.get(type_url) - if md_klass: - pb_metadata = md_klass.FromString(op_pb.metadata.value) - return cls(op_pb.name, client, pb_metadata, **kw) + result = cls(operation_pb.name, client, **caller_metadata) + result._update_state(operation_pb) + return result @property def complete(self): @@ -116,22 +163,46 @@ def complete(self): """ return self._complete + def _get_operation_rpc(self): + """Polls the status of the current operation. + + :rtype: :class:`~google.longrunning.operations_pb2.Operation` + :returns: The latest status of the current operation. + """ + request_pb = operations_pb2.GetOperationRequest(name=self.name) + return self.client._operations_stub.GetOperation(request_pb) + + def _update_state(self, operation_pb): + """Update the state of the current object based on operation. + + :type operation_pb: + :class:`~google.longrunning.operations_pb2.Operation` + :param operation_pb: Protobuf to be parsed. + """ + if operation_pb.done: + self._complete = True + + if operation_pb.HasField('metadata'): + self.metadata = _from_any(operation_pb.metadata) + + result_type = operation_pb.WhichOneof('result') + if result_type == 'error': + self.error = operation_pb.error + elif result_type == 'response': + self.response = _from_any(operation_pb.response) + def poll(self): """Check if the operation has finished. :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises: :class:`ValueError ` if the operation + :raises: :class:`~exceptions.ValueError` if the operation has already completed. """ if self.complete: raise ValueError('The operation has completed.') - request_pb = operations_pb2.GetOperationRequest(name=self.name) - # We expect a `google.longrunning.operations_pb2.Operation`. 
- operation_pb = self.client._operations_stub.GetOperation(request_pb) - - if operation_pb.done: - self._complete = True + operation_pb = self._get_operation_rpc() + self._update_state(operation_pb) return self.complete diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 234b5d93c749..e67bf21f6f23 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -44,11 +44,11 @@ def test_w_prefix(self): '%s/%s' % (PREFIX, Struct.DESCRIPTOR.full_name)) -class Test__register_type_url(unittest.TestCase): +class Test_register_type_url(unittest.TestCase): def _callFUT(self, type_url, klass): - from google.cloud.operation import _register_type_url - _register_type_url(type_url, klass) + from google.cloud.operation import register_type_url + register_type_url(type_url, klass) def test_simple(self): from google.cloud import operation as MUT @@ -106,19 +106,23 @@ def test_ctor_defaults(self): self.assertEqual(operation.name, self.OPERATION_NAME) self.assertIs(operation.client, client) self.assertIsNone(operation.target) - self.assertIsNone(operation.pb_metadata) - self.assertEqual(operation.metadata, {}) + self.assertIsNone(operation.response) + self.assertIsNone(operation.error) + self.assertIsNone(operation.metadata) + self.assertEqual(operation.caller_metadata, {}) def test_ctor_explicit(self): client = _Client() - pb_metadata = object() operation = self._makeOne( - self.OPERATION_NAME, client, pb_metadata, foo='bar') + self.OPERATION_NAME, client, foo='bar') + self.assertEqual(operation.name, self.OPERATION_NAME) self.assertIs(operation.client, client) self.assertIsNone(operation.target) - self.assertIs(operation.pb_metadata, pb_metadata) - self.assertEqual(operation.metadata, {'foo': 'bar'}) + self.assertIsNone(operation.response) + self.assertIsNone(operation.error) + self.assertIsNone(operation.metadata) + self.assertEqual(operation.caller_metadata, {'foo': 'bar'}) def test_from_pb_wo_metadata_or_kw(self): from google.longrunning import operations_pb2 @@ -130,41 +134,47 @@ def test_from_pb_wo_metadata_or_kw(self): self.assertEqual(operation.name, self.OPERATION_NAME) self.assertIs(operation.client, client) - self.assertIsNone(operation.pb_metadata) - self.assertEqual(operation.metadata, {}) + self.assertIsNone(operation.metadata) + self.assertEqual(operation.caller_metadata, {}) def test_from_pb_w_unknown_metadata(self): from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - from google.protobuf.struct_pb2 import Struct, Value - TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) + from google.protobuf.json_format import ParseDict + from google.protobuf.struct_pb2 import Struct + from google.cloud._testing import _Monkey + from google.cloud import operation as MUT + type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) client = _Client() - meta = Struct(fields={'foo': Value(string_value=u'Bar')}) - metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString()) + meta = ParseDict({'foo': 'Bar'}, Struct()) + metadata_pb = Any(type_url=type_url, value=meta.SerializeToString()) operation_pb = operations_pb2.Operation( name=self.OPERATION_NAME, metadata=metadata_pb) klass = self._getTargetClass() - operation = klass.from_pb(operation_pb, client) + with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): + operation = klass.from_pb(operation_pb, client) self.assertEqual(operation.name, 
self.OPERATION_NAME) self.assertIs(operation.client, client) - self.assertIsNone(operation.pb_metadata) - self.assertEqual(operation.metadata, {}) + self.assertEqual(operation.metadata, meta) + self.assertEqual(operation.caller_metadata, {}) def test_from_pb_w_metadata_and_kwargs(self): from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - from google.protobuf.struct_pb2 import Struct, Value + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value from google.cloud import operation as MUT from google.cloud._testing import _Monkey - TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) - type_url_map = {TYPE_URI: Struct} + + type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) + type_url_map = {type_url: Struct} client = _Client() meta = Struct(fields={'foo': Value(string_value=u'Bar')}) - metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString()) + metadata_pb = Any(type_url=type_url, value=meta.SerializeToString()) operation_pb = operations_pb2.Operation( name=self.OPERATION_NAME, metadata=metadata_pb) klass = self._getTargetClass() @@ -174,11 +184,8 @@ def test_from_pb_w_metadata_and_kwargs(self): self.assertEqual(operation.name, self.OPERATION_NAME) self.assertIs(operation.client, client) - pb_metadata = operation.pb_metadata - self.assertIsInstance(pb_metadata, Struct) - self.assertEqual(list(pb_metadata.fields), ['foo']) - self.assertEqual(pb_metadata.fields['foo'].string_value, 'Bar') - self.assertEqual(operation.metadata, {'baz': 'qux'}) + self.assertEqual(operation.metadata, meta) + self.assertEqual(operation.caller_metadata, {'baz': 'qux'}) def test_complete_property(self): client = _Client() @@ -198,8 +205,9 @@ def test_poll_already_complete(self): operation.poll() def test_poll_false(self): - from google.longrunning.operations_pb2 import GetOperationRequest - response_pb = _GetOperationResponse(False) + from google.longrunning import operations_pb2 + + response_pb = operations_pb2.Operation(done=False) client = _Client() stub = client._operations_stub stub._get_operation_response = response_pb @@ -208,12 +216,13 @@ def test_poll_false(self): self.assertFalse(operation.poll()) request_pb = stub._get_operation_requested - self.assertIsInstance(request_pb, GetOperationRequest) + self.assertIsInstance(request_pb, operations_pb2.GetOperationRequest) self.assertEqual(request_pb.name, self.OPERATION_NAME) def test_poll_true(self): - from google.longrunning.operations_pb2 import GetOperationRequest - response_pb = _GetOperationResponse(True) + from google.longrunning import operations_pb2 + + response_pb = operations_pb2.Operation(done=True) client = _Client() stub = client._operations_stub stub._get_operation_response = response_pb @@ -222,13 +231,90 @@ def test_poll_true(self): self.assertTrue(operation.poll()) request_pb = stub._get_operation_requested - self.assertIsInstance(request_pb, GetOperationRequest) + self.assertIsInstance(request_pb, operations_pb2.GetOperationRequest) self.assertEqual(request_pb.name, self.OPERATION_NAME) + def test__update_state_done(self): + from google.longrunning import operations_pb2 + + operation = self._makeOne(None, None) + self.assertFalse(operation.complete) + operation_pb = operations_pb2.Operation(done=True) + operation._update_state(operation_pb) + self.assertTrue(operation.complete) + + def test__update_state_metadata(self): + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from 
google.protobuf.struct_pb2 import Value + from google.cloud._testing import _Monkey + from google.cloud import operation as MUT + + operation = self._makeOne(None, None) + self.assertIsNone(operation.metadata) + + val_pb = Value(number_value=1337) + type_url = 'type.googleapis.com/%s' % (Value.DESCRIPTOR.full_name,) + val_any = Any(type_url=type_url, value=val_pb.SerializeToString()) + operation_pb = operations_pb2.Operation(metadata=val_any) + + with _Monkey(MUT, _TYPE_URL_MAP={type_url: Value}): + operation._update_state(operation_pb) + + self.assertEqual(operation.metadata, val_pb) + + def test__update_state_error(self): + from google.longrunning import operations_pb2 + from google.rpc.status_pb2 import Status + from google.cloud._testing import _Monkey + + operation = self._makeOne(None, None) + self.assertIsNone(operation.error) + self.assertIsNone(operation.response) + + error_pb = Status(code=1) + operation_pb = operations_pb2.Operation(error=error_pb) + operation._update_state(operation_pb) + + self.assertEqual(operation.error, error_pb) + self.assertIsNone(operation.response) + + def test__update_state_response(self): + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.protobuf.struct_pb2 import Value + from google.cloud._testing import _Monkey + from google.cloud import operation as MUT + + operation = self._makeOne(None, None) + self.assertIsNone(operation.error) + self.assertIsNone(operation.response) + + response_pb = Value(string_value='totes a response') + type_url = 'type.googleapis.com/%s' % (Value.DESCRIPTOR.full_name,) + response_any = Any(type_url=type_url, + value=response_pb.SerializeToString()) + operation_pb = operations_pb2.Operation(response=response_any) + + with _Monkey(MUT, _TYPE_URL_MAP={type_url: Value}): + operation._update_state(operation_pb) + + self.assertIsNone(operation.error) + self.assertEqual(operation.response, response_pb) + + def test__update_state_no_result(self): + from google.longrunning import operations_pb2 + + operation = self._makeOne(None, None) + self.assertIsNone(operation.error) + self.assertIsNone(operation.response) + + operation_pb = operations_pb2.Operation() + operation._update_state(operation_pb) -class _GetOperationResponse(object): - def __init__(self, done): - self.done = done + # Make sure nothing changed. + self.assertIsNone(operation.error) + self.assertIsNone(operation.response) class _OperationsStub(object): From 62892906f62b5eadd0a4600b976ade8474e78d02 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 28 Oct 2016 14:57:16 -0700 Subject: [PATCH 075/468] Make type_url optional when registering types. Also renaming register_type_url to register_type. --- .../google/cloud/operation.py | 13 +++-- .../unit_tests/test_operation.py | 47 ++++++++++++------- 2 files changed, 39 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 839e5e3eaf30..b98222143b4c 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -39,17 +39,20 @@ def _compute_type_url(klass, prefix=_GOOGLE_APIS_PREFIX): return '%s/%s' % (prefix, name) -def register_type_url(type_url, klass): +def register_type(klass, type_url=None): """Register a klass as the factory for a given type URL. 
- :type type_url: str - :param type_url: URL naming the type - :type klass: type :param klass: class to be used as a factory for the given type + :type type_url: str + :param type_url: (Optional) URL naming the type. If not provided, + infers the URL from the type descriptor. + :raises: ValueError if a registration already exists for the URL. """ + if type_url is None: + type_url = _compute_type_url(klass) if type_url in _TYPE_URL_MAP: if _TYPE_URL_MAP[type_url] is not klass: raise ValueError("Conflict: %s" % (_TYPE_URL_MAP[type_url],)) @@ -123,7 +126,7 @@ class Operation(object): """Metadata about the current operation (as a protobuf). Code that uses operations must register the metadata types (via - :func:`register_type_url`) to ensure that the metadata fields can be + :func:`register_type`) to ensure that the metadata fields can be converted into the correct types. """ diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index e67bf21f6f23..a30ec97f19ae 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -44,48 +44,63 @@ def test_w_prefix(self): '%s/%s' % (PREFIX, Struct.DESCRIPTOR.full_name)) -class Test_register_type_url(unittest.TestCase): +class Test_register_type(unittest.TestCase): - def _callFUT(self, type_url, klass): - from google.cloud.operation import register_type_url - register_type_url(type_url, klass) + def _callFUT(self, klass, type_url=None): + from google.cloud.operation import register_type + register_type(klass, type_url=type_url) - def test_simple(self): + def test_explicit(self): from google.cloud import operation as MUT from google.cloud._testing import _Monkey - TYPE_URI = 'testing.google-cloud-python.com/testing' + + type_url = 'testing.google-cloud-python.com/testing' klass = object() type_url_map = {} with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): - self._callFUT(TYPE_URI, klass) + self._callFUT(klass, type_url) + + self.assertEqual(type_url_map, {type_url: klass}) - self.assertEqual(type_url_map, {TYPE_URI: klass}) + def test_default(self): + from google.protobuf.struct_pb2 import Struct + from google.cloud._testing import _Monkey + from google.cloud import operation as MUT + + type_url_map = {} + with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): + self._callFUT(Struct) + + type_url = MUT._compute_type_url(Struct) + self.assertEqual(type_url_map, {type_url: Struct}) def test_w_same_class(self): from google.cloud import operation as MUT from google.cloud._testing import _Monkey - TYPE_URI = 'testing.google-cloud-python.com/testing' + + type_url = 'testing.google-cloud-python.com/testing' klass = object() - type_url_map = {TYPE_URI: klass} + type_url_map = {type_url: klass} with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): - self._callFUT(TYPE_URI, klass) + self._callFUT(klass, type_url) - self.assertEqual(type_url_map, {TYPE_URI: klass}) + self.assertEqual(type_url_map, {type_url: klass}) def test_w_conflict(self): from google.cloud import operation as MUT from google.cloud._testing import _Monkey - TYPE_URI = 'testing.google-cloud-python.com/testing' + + type_url = 'testing.google-cloud-python.com/testing' klass, other = object(), object() - type_url_map = {TYPE_URI: other} + type_url_map = {type_url: other} with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): with self.assertRaises(ValueError): - self._callFUT(TYPE_URI, klass) + self._callFUT(klass, type_url) - self.assertEqual(type_url_map, {TYPE_URI: other}) + 
self.assertEqual(type_url_map, {type_url: other}) class OperationTests(unittest.TestCase): From e7a6f246e37c87b9be169b79bcedb7580229ec4c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sat, 29 Oct 2016 14:03:33 -0700 Subject: [PATCH 076/468] Adding HTTP support to core Operation class. Will likely need updates to Speech operation class. --- .../google/cloud/operation.py | 63 ++++++++++++++- .../unit_tests/test_operation.py | 80 ++++++++++++++++++- 2 files changed, 139 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 839e5e3eaf30..8f00ab903168 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -15,6 +15,7 @@ """Wrap long-running operations returned from Google Cloud APIs.""" from google.longrunning import operations_pb2 +from google.protobuf import json_format _GOOGLE_APIS_PREFIX = 'type.googleapis.com' @@ -97,8 +98,13 @@ class Operation(object): :type name: str :param name: The fully-qualified path naming the operation. - :type client: object: must provide ``_operations_stub`` accessor. + :type client: :class:`~google.cloud.client.Client` :param client: The client used to poll for the status of the operation. + If the operation was created via JSON/HTTP, the client + must own a :class:`~google.cloud.connection.Connection` + to send polling requests. If created via protobuf, the + client must have a gRPC stub in the ``_operations_stub`` + attribute. :type caller_metadata: dict :param caller_metadata: caller-assigned metadata about the operation @@ -127,6 +133,8 @@ class Operation(object): converted into the correct types. """ + _use_grpc = True + def __init__(self, name, client, **caller_metadata): self.name = name self.client = client @@ -152,6 +160,30 @@ def from_pb(cls, operation_pb, client, **caller_metadata): """ result = cls(operation_pb.name, client, **caller_metadata) result._update_state(operation_pb) + result._use_grpc = True + return result + + @classmethod + def from_dict(cls, operation, client, **caller_metadata): + """Factory: construct an instance from a dictionary. + + :type operation: dict + :param operation: Operation as a JSON object. + + :type client: :class:`~google.cloud.client.Client` + :param client: The client used to poll for the status of the operation. + + :type caller_metadata: dict + :param caller_metadata: caller-assigned metadata about the operation + + :rtype: :class:`Operation` + :returns: new instance, with attributes based on the protobuf. + """ + operation_pb = json_format.ParseDict( + operation, operations_pb2.Operation()) + result = cls(operation_pb.name, client, **caller_metadata) + result._update_state(operation_pb) + result._use_grpc = False return result @property @@ -166,12 +198,39 @@ def complete(self): def _get_operation_rpc(self): """Polls the status of the current operation. + Uses gRPC request to check. + :rtype: :class:`~google.longrunning.operations_pb2.Operation` :returns: The latest status of the current operation. """ request_pb = operations_pb2.GetOperationRequest(name=self.name) return self.client._operations_stub.GetOperation(request_pb) + def _get_operation_http(self): + """Checks the status of the current operation. + + Uses HTTP request to check. + + :rtype: :class:`~google.longrunning.operations_pb2.Operation` + :returns: The latest status of the current operation. 
+ """ + path = 'operations/%s' % (self.name,) + api_response = self.client.connection.api_request( + method='GET', path=path) + return json_format.ParseDict( + api_response, operations_pb2.Operation()) + + def _get_operation(self): + """Checks the status of the current operation. + + :rtype: :class:`~google.longrunning.operations_pb2.Operation` + :returns: The latest status of the current operation. + """ + if self._use_grpc: + return self._get_operation_rpc() + else: + return self._get_operation_http() + def _update_state(self, operation_pb): """Update the state of the current object based on operation. @@ -202,7 +261,7 @@ def poll(self): if self.complete: raise ValueError('The operation has completed.') - operation_pb = self._get_operation_rpc() + operation_pb = self._get_operation() self._update_state(operation_pb) return self.complete diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index e67bf21f6f23..11bcadfbd7eb 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -88,7 +88,7 @@ def test_w_conflict(self): self.assertEqual(type_url_map, {TYPE_URI: other}) -class OperationTests(unittest.TestCase): +class TestOperation(unittest.TestCase): OPERATION_NAME = 'operations/projects/foo/instances/bar/operations/123' @@ -110,6 +110,7 @@ def test_ctor_defaults(self): self.assertIsNone(operation.error) self.assertIsNone(operation.metadata) self.assertEqual(operation.caller_metadata, {}) + self.assertTrue(operation._use_grpc) def test_ctor_explicit(self): client = _Client() @@ -123,6 +124,7 @@ def test_ctor_explicit(self): self.assertIsNone(operation.error) self.assertIsNone(operation.metadata) self.assertEqual(operation.caller_metadata, {'foo': 'bar'}) + self.assertTrue(operation._use_grpc) def test_from_pb_wo_metadata_or_kw(self): from google.longrunning import operations_pb2 @@ -187,6 +189,38 @@ def test_from_pb_w_metadata_and_kwargs(self): self.assertEqual(operation.metadata, meta) self.assertEqual(operation.caller_metadata, {'baz': 'qux'}) + def test_from_dict(self): + from google.protobuf.struct_pb2 import Struct + from google.cloud._testing import _Monkey + from google.cloud import operation as MUT + + type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) + api_response = { + 'name': self.OPERATION_NAME, + 'metadata': { + '@type': type_url, + 'value': {'foo': 'Bar'}, + }, + } + + client = _Client() + klass = self._getTargetClass() + + with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): + operation = klass.from_dict(api_response, client) + + self.assertEqual(operation.name, self.OPERATION_NAME) + self.assertIs(operation.client, client) + self.assertIsNone(operation.target) + self.assertIsNone(operation.response) + self.assertIsNone(operation.error) + self.assertIsInstance(operation.metadata, Struct) + self.assertEqual(len(operation.metadata.fields), 1) + self.assertEqual( + operation.metadata.fields['foo'].string_value, 'Bar') + self.assertEqual(operation.caller_metadata, {}) + self.assertFalse(operation._use_grpc) + def test_complete_property(self): client = _Client() operation = self._makeOne(self.OPERATION_NAME, client) @@ -234,6 +268,35 @@ def test_poll_true(self): self.assertIsInstance(request_pb, operations_pb2.GetOperationRequest) self.assertEqual(request_pb.name, self.OPERATION_NAME) + def test_poll_http(self): + from google.protobuf.struct_pb2 import Struct + from google.cloud._testing import _Monkey + from 
google.cloud import operation as MUT + + type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,) + name = '2302903294023' + api_response = { + 'name': name, + 'done': True, + 'metadata': { + '@type': type_url, + 'value': {'foo': 'Bar'}, + }, + } + connection = _Connection(api_response) + client = _Client(connection) + operation = self._makeOne(name, client) + operation._use_grpc = False + + with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): + self.assertTrue(operation.poll()) + + expected_path = 'operations/%s' % (name,) + self.assertEqual(connection._requested, [{ + 'method': 'GET', + 'path': expected_path, + }]) + def test__update_state_done(self): from google.longrunning import operations_pb2 @@ -324,7 +387,20 @@ def GetOperation(self, request_pb): return self._get_operation_response +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response + + class _Client(object): - def __init__(self): + def __init__(self, connection=None): self._operations_stub = _OperationsStub() + self.connection = connection From c05b66c8f65b90292d1ee5b5f5942f56956f28b5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 08:43:37 -0700 Subject: [PATCH 077/468] Changing Operation._use_grpc to _from_grpc. --- packages/google-cloud-core/google/cloud/operation.py | 8 ++++---- packages/google-cloud-core/unit_tests/test_operation.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 8f00ab903168..b632601b6257 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -133,7 +133,7 @@ class Operation(object): converted into the correct types. """ - _use_grpc = True + _from_grpc = True def __init__(self, name, client, **caller_metadata): self.name = name @@ -160,7 +160,7 @@ def from_pb(cls, operation_pb, client, **caller_metadata): """ result = cls(operation_pb.name, client, **caller_metadata) result._update_state(operation_pb) - result._use_grpc = True + result._from_grpc = True return result @classmethod @@ -183,7 +183,7 @@ def from_dict(cls, operation, client, **caller_metadata): operation, operations_pb2.Operation()) result = cls(operation_pb.name, client, **caller_metadata) result._update_state(operation_pb) - result._use_grpc = False + result._from_grpc = False return result @property @@ -226,7 +226,7 @@ def _get_operation(self): :rtype: :class:`~google.longrunning.operations_pb2.Operation` :returns: The latest status of the current operation. 
""" - if self._use_grpc: + if self._from_grpc: return self._get_operation_rpc() else: return self._get_operation_http() diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 11bcadfbd7eb..5213713c772b 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -110,7 +110,7 @@ def test_ctor_defaults(self): self.assertIsNone(operation.error) self.assertIsNone(operation.metadata) self.assertEqual(operation.caller_metadata, {}) - self.assertTrue(operation._use_grpc) + self.assertTrue(operation._from_grpc) def test_ctor_explicit(self): client = _Client() @@ -124,7 +124,7 @@ def test_ctor_explicit(self): self.assertIsNone(operation.error) self.assertIsNone(operation.metadata) self.assertEqual(operation.caller_metadata, {'foo': 'bar'}) - self.assertTrue(operation._use_grpc) + self.assertTrue(operation._from_grpc) def test_from_pb_wo_metadata_or_kw(self): from google.longrunning import operations_pb2 @@ -219,7 +219,7 @@ def test_from_dict(self): self.assertEqual( operation.metadata.fields['foo'].string_value, 'Bar') self.assertEqual(operation.caller_metadata, {}) - self.assertFalse(operation._use_grpc) + self.assertFalse(operation._from_grpc) def test_complete_property(self): client = _Client() @@ -286,7 +286,7 @@ def test_poll_http(self): connection = _Connection(api_response) client = _Client(connection) operation = self._makeOne(name, client) - operation._use_grpc = False + operation._from_grpc = False with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): self.assertTrue(operation.poll()) From 35a19da8438c142c38921207a7cca59aac2fd431 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 18 Oct 2016 17:21:42 -0700 Subject: [PATCH 078/468] Making BigQuery table.fetch_data() into an iterator. --- packages/google-cloud-core/google/cloud/iterator.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 021fab0c1653..49f70fb54a59 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -297,6 +297,7 @@ class HTTPIterator(Iterator): _PAGE_TOKEN = 'pageToken' _MAX_RESULTS = 'maxResults' + _NEXT_TOKEN = 'nextPageToken' _RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS]) _HTTP_METHOD = 'GET' From f033e133e829f2ebfa223dedc663f353c72c168b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 31 Oct 2016 22:53:05 -0700 Subject: [PATCH 079/468] Rebase fixes. --- packages/google-cloud-core/google/cloud/iterator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 49f70fb54a59..5f774aa4a846 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -340,7 +340,7 @@ def _next_page(self): items = response.get(self._items_key, ()) page = Page(self, items, self._item_to_value) self._page_start(self, page, response) - self.next_page_token = response.get('nextPageToken') + self.next_page_token = response.get(self._NEXT_TOKEN) return page else: return None From 32c5d94a7bcebd41a60181b12df5b4e747553702 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 10:12:12 -0700 Subject: [PATCH 080/468] Adding PyPI badges to package READMEs. 
--- packages/google-cloud-core/README.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index 8115c049c89c..5088505addc7 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -5,6 +5,8 @@ This library is not meant to stand-alone. Instead it defines common helpers (e.g. base ``Client`` and ``Connection`` classes) used by all of the ``google-cloud-*``. +|pypi| |versions| + - `Documentation`_ .. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/google-cloud-api.html @@ -15,3 +17,8 @@ Quick Start .. code-block:: console $ pip install --upgrade google-cloud-core + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-core.svg + :target: https://pypi.python.org/pypi/google-cloud-core +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-core.svg + :target: https://pypi.python.org/pypi/google-cloud-core From cd777a8cc296527fa4c51d7df6b9443fd727bc30 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 3 Nov 2016 22:56:45 -0700 Subject: [PATCH 081/468] Factoring out the parts of make_secure_stub() that create a channel. --- .../google/cloud/_helpers.py | 41 ++++++++++---- .../unit_tests/test__helpers.py | 56 ++++++++++++++----- 2 files changed, 72 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index b9dc5a83b1be..f4f6b43b1ce2 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -620,8 +620,8 @@ def __call__(self, unused_context, callback): callback(headers, None) -def make_secure_stub(credentials, user_agent, stub_class, host): - """Makes a secure stub for an RPC service. +def make_secure_channel(credentials, user_agent, host): + """Makes a secure channel for an RPC service. Uses / depends on gRPC. @@ -630,16 +630,13 @@ def make_secure_stub(credentials, user_agent, stub_class, host): access tokens. :type user_agent: str - :param user_agent: (Optional) The user agent to be used with API requests. - - :type stub_class: type - :param stub_class: A gRPC stub type for a given service. + :param user_agent: The user agent to be used with API requests. :type host: str :param host: The host for the service. - :rtype: object, instance of ``stub_class`` - :returns: The stub object used to make gRPC requests to a given API. + :rtype: :class:`grpc._channel.Channel` + :returns: gRPC secure channel with credentials attached. """ # ssl_channel_credentials() loads root certificates from # `grpc/_adapter/credentials/roots.pem`. @@ -653,8 +650,32 @@ def make_secure_stub(credentials, user_agent, stub_class, host): channel_args = ( ('grpc.primary_user_agent', user_agent), ) - channel = grpc.secure_channel(target, channel_creds, - options=channel_args) + return grpc.secure_channel(target, channel_creds, + options=channel_args) + + +def make_secure_stub(credentials, user_agent, stub_class, host): + """Makes a secure stub for an RPC service. + + Uses / depends on gRPC. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: The OAuth2 Credentials to use for creating + access tokens. + + :type user_agent: str + :param user_agent: The user agent to be used with API requests. + + :type stub_class: type + :param stub_class: A gRPC stub type for a given service. + + :type host: str + :param host: The host for the service. 
+ + :rtype: object, instance of ``stub_class`` + :returns: The stub object used to make gRPC requests to a given API. + """ + channel = make_secure_channel(credentials, user_agent, host) return stub_class(channel) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index b1863b8ce5e0..779f9a88abbe 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -917,20 +917,17 @@ def callback(*args): self.assertEqual(len(credentials._tokens), 1) -class Test_make_secure_stub(unittest.TestCase): +class Test_make_secure_channel(unittest.TestCase): def _callFUT(self, *args, **kwargs): - from google.cloud._helpers import make_secure_stub - return make_secure_stub(*args, **kwargs) + from google.cloud._helpers import make_secure_channel + return make_secure_channel(*args, **kwargs) def test_it(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud import _helpers as MUT - mock_result = object() - stub_inputs = [] - SSL_CREDS = object() METADATA_CREDS = object() COMPOSITE_CREDS = object() @@ -961,11 +958,6 @@ def secure_channel(self, *args, **kwargs): return CHANNEL grpc_mod = _GRPCModule() - - def mock_stub_class(channel): - stub_inputs.append(channel) - return mock_result - metadata_plugin = object() plugin_args = [] @@ -978,11 +970,9 @@ def mock_plugin(*args): user_agent = 'USER_AGENT' with _Monkey(MUT, grpc=grpc_mod, MetadataPlugin=mock_plugin): - result = self._callFUT(credentials, user_agent, - mock_stub_class, host) + result = self._callFUT(credentials, user_agent, host) - self.assertIs(result, mock_result) - self.assertEqual(stub_inputs, [CHANNEL]) + self.assertIs(result, CHANNEL) self.assertEqual(plugin_args, [(credentials,)]) self.assertEqual(grpc_mod.ssl_channel_credentials_args, ()) self.assertEqual(grpc_mod.metadata_call_credentials_args, @@ -999,6 +989,42 @@ def mock_plugin(*args): (secure_args, secure_kwargs)) +class Test_make_secure_stub(unittest.TestCase): + + def _callFUT(self, *args, **kwargs): + from google.cloud._helpers import make_secure_stub + return make_secure_stub(*args, **kwargs) + + def test_it(self): + from google.cloud._testing import _Monkey + from google.cloud import _helpers as MUT + + result = object() + channel_obj = object() + channels = [] + channel_args = [] + + def stub_class(channel): + channels.append(channel) + return result + + def mock_channel(*args): + channel_args.append(args) + return channel_obj + + credentials = object() + user_agent = 'you-sir-age-int' + host = 'localhost' + with _Monkey(MUT, make_secure_channel=mock_channel): + stub = self._callFUT(credentials, user_agent, + stub_class, host) + + self.assertIs(stub, result) + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(credentials, user_agent, host)]) + + class Test_make_insecure_stub(unittest.TestCase): def _callFUT(self, *args, **kwargs): From bef90908f394b9ac84289441b599ecadebd87711 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 14:02:30 -0700 Subject: [PATCH 082/468] Lint fixes needed for the latest (2.1) pycodestyle. 
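The only substantive change is whitespace. pycodestyle 2.1 starts flagging module-level code that follows a function or class definition without two blank lines in between (presumably the new E305 check; the exact error code is an assumption), so the temp-dir factory at the bottom of test_transfer.py just gains a second blank line. A minimal sketch of the pattern, with the real function body elided and a placeholder return:

    def _tempdir_maker():
        # body elided; the real helper returns the _tempdir_mgr context
        # manager that the tests use as ``with _tempdir() as tempdir:``
        return None


    _tempdir = _tempdir_maker()  # two blank lines above keep pycodestyle 2.1 quiet
    del _tempdir_maker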
--- packages/google-cloud-core/unit_tests/streaming/test_transfer.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index 58681585fd5b..828e0351f418 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -1952,5 +1952,6 @@ def _tempdir_mgr(): return _tempdir_mgr + _tempdir = _tempdir_maker() del _tempdir_maker From cc511aa41c6621a484d816d2a2653131adc4611d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 6 Nov 2016 09:52:01 -0800 Subject: [PATCH 083/468] Handle missing nanos. --- .../google-cloud-core/google/cloud/_helpers.py | 15 ++++++++++----- .../unit_tests/test__helpers.py | 18 ++++++++++++++++++ 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index f4f6b43b1ce2..33d9161cbf58 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -53,8 +53,10 @@ (?P \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS ) - \. # decimal point - (?P\d{1,9}) # nanoseconds, maybe truncated + ( # Optional decimal part + \. # decimal point + (?P\d{1,9}) # nanoseconds, maybe truncated + )? Z # Zulu """, re.VERBOSE) # NOTE: Catching this ImportError is a workaround for GAE not supporting the @@ -429,9 +431,12 @@ def _rfc3339_nanos_to_datetime(dt_str): bare_seconds = datetime.datetime.strptime( with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION) fraction = with_nanos.group('nanos') - scale = 9 - len(fraction) - nanos = int(fraction) * (10 ** scale) - micros = nanos // 1000 + if fraction is None: + micros = 0 + else: + scale = 9 - len(fraction) + nanos = int(fraction) * (10 ** scale) + micros = nanos // 1000 return bare_seconds.replace(microsecond=micros, tzinfo=UTC) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 779f9a88abbe..f939d34ee74e 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -667,6 +667,24 @@ def test_w_truncated_nanos(self): year, month, day, hour, minute, seconds, micros, UTC) self.assertEqual(result, expected_result) + def test_without_nanos(self): + import datetime + from google.cloud._helpers import UTC + + year = 1988 + month = 4 + day = 29 + hour = 12 + minute = 12 + seconds = 12 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02dZ' % ( + year, month, day, hour, minute, seconds) + result = self._callFUT(dt_str) + expected_result = datetime.datetime( + year, month, day, hour, minute, seconds, 0, UTC) + self.assertEqual(result, expected_result) + def test_w_naonseconds(self): import datetime from google.cloud._helpers import UTC From 9ac9fe660954be46690186bfc23fd555e362fe7d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 21:04:46 -0800 Subject: [PATCH 084/468] Getting umbrella coverage back to 100%. 
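The uncovered code was the __iter__ on the fake pages object in _testing.py: the unit tests only ever advance the fake through next() / __next__, never by calling iter() on it, so a method that just returns self could not execute. Dropping it restores 100% without writing a test for a test helper. Roughly, where the class name and constructor below are illustrative stand-ins rather than the exact helper in _testing.py:

    import six


    class _FakePages(object):
        """Illustrative stand-in for the fake pages helper."""

        def __init__(self, *pages):
            self._pages = iter(pages)

        def next(self):
            # Tests drive the fake via next()/__next__ only, so an
            # __iter__ that returned self was unreachable and showed
            # up as uncovered in the umbrella run.
            return six.next(self._pages)

        __next__ = next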
--- packages/google-cloud-core/google/cloud/_testing.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index f03f36ed813b..49eb35ad50ae 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -89,6 +89,3 @@ def next(self): return six.next(self._pages) __next__ = next - - def __iter__(self): - return self From 865d2461d844fedd35d2c712a2f54afc5b8edb8d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:20:59 -0800 Subject: [PATCH 085/468] Renaming _getTargetClass to _get_target_class. Done via: $ git grep -l 'def _getTargetClass(self)' | \ > xargs sed -i s/'def _getTargetClass(self)'/'@staticmethod\n def _get_target_class()'/g --- .../unit_tests/streaming/test_buffered_stream.py | 3 ++- .../unit_tests/streaming/test_exceptions.py | 6 ++++-- .../unit_tests/streaming/test_http_wrapper.py | 9 ++++++--- .../unit_tests/streaming/test_stream_slice.py | 3 ++- .../unit_tests/streaming/test_transfer.py | 9 ++++++--- .../google-cloud-core/unit_tests/test__helpers.py | 9 ++++++--- packages/google-cloud-core/unit_tests/test_client.py | 9 ++++++--- .../google-cloud-core/unit_tests/test_connection.py | 6 ++++-- .../google-cloud-core/unit_tests/test_exceptions.py | 3 ++- .../google-cloud-core/unit_tests/test_iterator.py | 12 ++++++++---- .../google-cloud-core/unit_tests/test_operation.py | 3 ++- 11 files changed, 48 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py index b6f4066b11c2..b93e5d05b827 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py @@ -17,7 +17,8 @@ class Test_BufferedStream(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.buffered_stream import BufferedStream return BufferedStream diff --git a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py index b72dfabac38b..89745a5d12c7 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py @@ -17,7 +17,8 @@ class Test_HttpError(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.exceptions import HttpError return HttpError @@ -58,7 +59,8 @@ class _Response(object): class Test_RetryAfterError(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.exceptions import RetryAfterError return RetryAfterError diff --git a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py index 8aad20cfb2f1..560984db49cf 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py @@ -17,7 +17,8 @@ class Test__httplib2_debug_level(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.http_wrapper import _httplib2_debug_level return _httplib2_debug_level @@ -76,7 +77,8 @@ def set_debuglevel(self, value): 
class Test_Request(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.http_wrapper import Request return Request @@ -116,7 +118,8 @@ def test_body_setter_w_non_string(self): class Test_Response(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.http_wrapper import Response return Response diff --git a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py index 10e9d9bbf25b..9db3b76d6d01 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py @@ -17,7 +17,8 @@ class Test_StreamSlice(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.stream_slice import StreamSlice return StreamSlice diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index 828e0351f418..e765e359eb6d 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -18,7 +18,8 @@ class Test__Transfer(unittest.TestCase): URL = 'http://example.com/api' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.transfer import _Transfer return _Transfer @@ -162,7 +163,8 @@ def test___del___closes_stream(self): class Test_Download(unittest.TestCase): URL = "http://example.com/api" - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.transfer import Download return Download @@ -797,7 +799,8 @@ class Test_Upload(unittest.TestCase): MIME_TYPE = 'application/octet-stream' UPLOAD_URL = 'http://example.com/upload/id=foobar' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.streaming.transfer import Upload return Upload diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index f939d34ee74e..786cf1e7e7e8 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -18,7 +18,8 @@ class Test__LocalStack(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud._helpers import _LocalStack return _LocalStack @@ -46,7 +47,8 @@ def test_it(self): class Test__UTC(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud._helpers import _UTC return _UTC @@ -905,7 +907,8 @@ def test_w_project_passed_as_none(self): class TestMetadataPlugin(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud._helpers import MetadataPlugin return MetadataPlugin diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index c7e4c6536e1b..f5927b3c0032 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -17,7 +17,8 @@ class Test_ClientFactoryMixin(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.client import _ClientFactoryMixin return _ClientFactoryMixin @@ -37,7 +38,8 @@ def 
tearDown(self): KLASS = self._getTargetClass() KLASS._connection_class = self.original_cnxn_class - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.client import Client return Client @@ -124,7 +126,8 @@ def tearDown(self): KLASS = self._getTargetClass() KLASS._connection_class = self.original_cnxn_class - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.client import JSONClient return JSONClient diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test_connection.py index e2a02e83bdf7..55cc80650c82 100644 --- a/packages/google-cloud-core/unit_tests/test_connection.py +++ b/packages/google-cloud-core/unit_tests/test_connection.py @@ -17,7 +17,8 @@ class TestConnection(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.connection import Connection return Connection @@ -108,7 +109,8 @@ def test__create_scoped_credentials_no_credentials(self): class TestJSONConnection(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.connection import JSONConnection return JSONConnection diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index 8460d6d8f1c4..2130d795fe50 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -17,7 +17,8 @@ class Test_GoogleCloudError(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.exceptions import GoogleCloudError return GoogleCloudError diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 68b2f65aadc0..b3c2d834ff00 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -28,7 +28,8 @@ def test_do_nothing(self): class TestPage(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.iterator import Page return Page @@ -91,7 +92,8 @@ def item_to_value(self, item): class TestIterator(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.iterator import Iterator return Iterator @@ -233,7 +235,8 @@ def test__next_page_virtual(self): class TestHTTPIterator(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.iterator import HTTPIterator return HTTPIterator @@ -465,7 +468,8 @@ def test__get_next_page_bad_http_method(self): class TestGAXIterator(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.iterator import GAXIterator return GAXIterator diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 7c204278e6b3..d2743f0cf18a 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -107,7 +107,8 @@ class TestOperation(unittest.TestCase): OPERATION_NAME = 'operations/projects/foo/instances/bar/operations/123' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.operation import Operation return Operation From 7bef1093894a5d0dd892186d7f6191da75c25ce5 Mon Sep 17 
00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:22:12 -0800 Subject: [PATCH 086/468] Changing uses of _getTargetClass to _get_target_class. Done via: $ git grep -l _getTargetClass | \ > xargs sed -i s/_getTargetClass/_get_target_class/g --- .../streaming/test_buffered_stream.py | 2 +- .../unit_tests/streaming/test_exceptions.py | 8 +++--- .../unit_tests/streaming/test_http_wrapper.py | 6 ++-- .../unit_tests/streaming/test_stream_slice.py | 2 +- .../unit_tests/streaming/test_transfer.py | 28 +++++++++---------- .../unit_tests/test__helpers.py | 8 +++--- .../unit_tests/test_client.py | 22 +++++++-------- .../unit_tests/test_connection.py | 16 +++++------ .../unit_tests/test_exceptions.py | 2 +- .../unit_tests/test_iterator.py | 8 +++--- .../unit_tests/test_operation.py | 10 +++---- 11 files changed, 56 insertions(+), 56 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py index b93e5d05b827..64b2066d7e3f 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py @@ -23,7 +23,7 @@ def _get_target_class(): return BufferedStream def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_closed_stream(self): class _Stream(object): diff --git a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py index 89745a5d12c7..242490966ba2 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py @@ -23,7 +23,7 @@ def _get_target_class(): return HttpError def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): RESPONSE = {'status': '404'} @@ -49,7 +49,7 @@ class _Response(object): content = CONTENT request_url = URL - klass = self._getTargetClass() + klass = self._get_target_class() exception = klass.from_response(_Response()) self.assertIsInstance(exception, klass) self.assertEqual(exception.response, RESPONSE) @@ -65,7 +65,7 @@ def _get_target_class(): return RetryAfterError def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): RESPONSE = {'status': '404'} @@ -94,7 +94,7 @@ class _Response(object): request_url = URL retry_after = RETRY_AFTER - klass = self._getTargetClass() + klass = self._get_target_class() exception = klass.from_response(_Response()) self.assertIsInstance(exception, klass) self.assertEqual(exception.response, RESPONSE) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py index 560984db49cf..e430636fe7f1 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py @@ -23,7 +23,7 @@ def _get_target_class(): return _httplib2_debug_level def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_wo_loggable_body_wo_http(self): from google.cloud._testing import _Monkey @@ -83,7 +83,7 @@ def _get_target_class(): return Request def _makeOne(self, *args, **kw): - 
return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): request = self._makeOne() @@ -124,7 +124,7 @@ def _get_target_class(): return Response def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): CONTENT = 'CONTENT' diff --git a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py index 9db3b76d6d01..0a5376abad4e 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py @@ -23,7 +23,7 @@ def _get_target_class(): return StreamSlice def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): from io import BytesIO diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index e765e359eb6d..86b48fbf01ef 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -24,7 +24,7 @@ def _get_target_class(): return _Transfer def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE @@ -169,7 +169,7 @@ def _get_target_class(): return Download def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): stream = _Stream() @@ -196,7 +196,7 @@ def test_ctor_w_total_size(self): def test_from_file_w_existing_file_no_override(self): import os - klass = self._getTargetClass() + klass = self._get_target_class() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') with open(filename, 'w') as fileobj: @@ -206,7 +206,7 @@ def test_from_file_w_existing_file_no_override(self): def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): import os - klass = self._getTargetClass() + klass = self._get_target_class() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') with open(filename, 'w') as fileobj: @@ -220,7 +220,7 @@ def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): def test_from_stream_defaults(self): stream = _Stream() - klass = self._getTargetClass() + klass = self._get_target_class() download = klass.from_stream(stream) self.assertIs(download.stream, stream) self.assertTrue(download.auto_transfer) @@ -230,7 +230,7 @@ def test_from_stream_explicit(self): CHUNK_SIZE = 1 << 18 SIZE = 123 stream = _Stream() - klass = self._getTargetClass() + klass = self._get_target_class() download = klass.from_stream(stream, auto_transfer=False, total_size=SIZE, chunksize=CHUNK_SIZE) self.assertIs(download.stream, stream) @@ -805,7 +805,7 @@ def _get_target_class(): return Upload def _makeOne(self, stream, mime_type=MIME_TYPE, *args, **kw): - return self._getTargetClass()(stream, mime_type, *args, **kw) + return self._get_target_class()(stream, mime_type, *args, **kw) def test_ctor_defaults(self): from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE @@ -830,14 +830,14 @@ def test_ctor_w_kwds(self): self.assertEqual(upload.chunksize, CHUNK_SIZE) def test_from_file_w_nonesuch_file(self): - klass = 
self._getTargetClass() + klass = self._get_target_class() filename = '~nosuchuser/file.txt' with self.assertRaises(OSError): klass.from_file(filename) def test_from_file_wo_mimetype_w_unguessable_filename(self): import os - klass = self._getTargetClass() + klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE' with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.unguessable') @@ -848,7 +848,7 @@ def test_from_file_wo_mimetype_w_unguessable_filename(self): def test_from_file_wo_mimetype_w_guessable_filename(self): import os - klass = self._getTargetClass() + klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.txt') @@ -862,7 +862,7 @@ def test_from_file_wo_mimetype_w_guessable_filename(self): def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): import os - klass = self._getTargetClass() + klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' CHUNK_SIZE = 3 with _tempdir() as tempdir: @@ -881,13 +881,13 @@ def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): upload._stream.close() def test_from_stream_wo_mimetype(self): - klass = self._getTargetClass() + klass = self._get_target_class() stream = _Stream() with self.assertRaises(ValueError): klass.from_stream(stream, mime_type=None) def test_from_stream_defaults(self): - klass = self._getTargetClass() + klass = self._get_target_class() stream = _Stream() upload = klass.from_stream(stream, mime_type=self.MIME_TYPE) self.assertEqual(upload.mime_type, self.MIME_TYPE) @@ -895,7 +895,7 @@ def test_from_stream_defaults(self): self.assertIsNone(upload.total_size) def test_from_stream_explicit(self): - klass = self._getTargetClass() + klass = self._get_target_class() stream = _Stream() SIZE = 10 CHUNK_SIZE = 3 diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 786cf1e7e7e8..fc64c1eb547d 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -25,7 +25,7 @@ def _get_target_class(): return _LocalStack def _makeOne(self): - return self._getTargetClass()() + return self._get_target_class()() def test_it(self): batch1, batch2 = object(), object() @@ -53,11 +53,11 @@ def _get_target_class(): return _UTC def _makeOne(self): - return self._getTargetClass()() + return self._get_target_class()() def test_module_property(self): from google.cloud import _helpers as MUT - klass = self._getTargetClass() + klass = self._get_target_class() try: import pytz except ImportError: @@ -913,7 +913,7 @@ def _get_target_class(): return MetadataPlugin def _makeOne(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def test_constructor(self): credentials = object() diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index f5927b3c0032..6dc92b4f625f 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -23,19 +23,19 @@ def _get_target_class(): return _ClientFactoryMixin def test_virtual(self): - klass = self._getTargetClass() + klass = self._get_target_class() self.assertFalse('__init__' in klass.__dict__) class TestClient(unittest.TestCase): def setUp(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() 
self.original_cnxn_class = KLASS._connection_class KLASS._connection_class = _MockConnection def tearDown(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() KLASS._connection_class = self.original_cnxn_class @staticmethod @@ -44,7 +44,7 @@ def _get_target_class(): return Client def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): from google.cloud._testing import _Monkey @@ -77,7 +77,7 @@ def test_from_service_account_json(self): from google.cloud._testing import _Monkey from google.cloud import client - KLASS = self._getTargetClass() + KLASS = self._get_target_class() MOCK_FILENAME = 'foo.path' mock_creds = _MockServiceAccountCredentials() with _Monkey(client, ServiceAccountCredentials=mock_creds): @@ -87,7 +87,7 @@ def test_from_service_account_json(self): self.assertEqual(mock_creds.json_called, [MOCK_FILENAME]) def test_from_service_account_json_fail(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() CREDENTIALS = object() self.assertRaises(TypeError, KLASS.from_service_account_json, None, credentials=CREDENTIALS) @@ -96,7 +96,7 @@ def test_from_service_account_p12(self): from google.cloud._testing import _Monkey from google.cloud import client - KLASS = self._getTargetClass() + KLASS = self._get_target_class() CLIENT_EMAIL = 'phred@example.com' MOCK_FILENAME = 'foo.path' mock_creds = _MockServiceAccountCredentials() @@ -109,7 +109,7 @@ def test_from_service_account_p12(self): [(CLIENT_EMAIL, MOCK_FILENAME)]) def test_from_service_account_p12_fail(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() CREDENTIALS = object() self.assertRaises(TypeError, KLASS.from_service_account_p12, None, None, credentials=CREDENTIALS) @@ -118,12 +118,12 @@ def test_from_service_account_p12_fail(self): class TestJSONClient(unittest.TestCase): def setUp(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() self.original_cnxn_class = KLASS._connection_class KLASS._connection_class = _MockConnection def tearDown(self): - KLASS = self._getTargetClass() + KLASS = self._get_target_class() KLASS._connection_class = self.original_cnxn_class @staticmethod @@ -132,7 +132,7 @@ def _get_target_class(): return JSONClient def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): from google.cloud._testing import _Monkey diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test_connection.py index 55cc80650c82..3ecbcf8d71f6 100644 --- a/packages/google-cloud-core/unit_tests/test_connection.py +++ b/packages/google-cloud-core/unit_tests/test_connection.py @@ -23,7 +23,7 @@ def _get_target_class(): return Connection def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): conn = self._makeOne() @@ -76,7 +76,7 @@ def test_user_agent_format(self): self.assertEqual(conn.USER_AGENT, expected_ua) def test__create_scoped_credentials_with_scoped_credentials(self): - klass = self._getTargetClass() + klass = self._get_target_class() scoped_creds = object() scope = 'google-specific-scope' credentials = _Credentials(scoped=scoped_creds) @@ -87,7 +87,7 @@ def test__create_scoped_credentials_with_scoped_credentials(self): self.assertEqual(credentials._scopes, [scope]) def 
test__create_scoped_credentials_without_scope_required(self): - klass = self._getTargetClass() + klass = self._get_target_class() credentials = _Credentials() result = klass._create_scoped_credentials(credentials, None) @@ -96,13 +96,13 @@ def test__create_scoped_credentials_without_scope_required(self): self.assertEqual(credentials._scopes, []) def test__create_scoped_credentials_non_scoped_credentials(self): - klass = self._getTargetClass() + klass = self._get_target_class() credentials = object() result = klass._create_scoped_credentials(credentials, None) self.assertIs(result, credentials) def test__create_scoped_credentials_no_credentials(self): - klass = self._getTargetClass() + klass = self._get_target_class() result = klass._create_scoped_credentials(None, None) self.assertIsNone(result) @@ -115,17 +115,17 @@ def _get_target_class(): return JSONConnection def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def _makeMockOne(self, *args, **kw): - class MockConnection(self._getTargetClass()): + class MockConnection(self._get_target_class()): API_URL_TEMPLATE = '{api_base_url}/mock/{api_version}{path}' API_BASE_URL = 'http://mock' API_VERSION = 'vMOCK' return MockConnection(*args, **kw) def test_class_defaults(self): - klass = self._getTargetClass() + klass = self._get_target_class() self.assertIsNone(klass.API_URL_TEMPLATE) self.assertIsNone(klass.API_BASE_URL) self.assertIsNone(klass.API_VERSION) diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index 2130d795fe50..5b145bb02a5f 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -23,7 +23,7 @@ def _get_target_class(): return GoogleCloudError def _makeOne(self, message, errors=()): - return self._getTargetClass()(message, errors=errors) + return self._get_target_class()(message, errors=errors) def test_ctor_defaults(self): e = self._makeOne('Testing') diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index b3c2d834ff00..cd3dcad5d370 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -34,7 +34,7 @@ def _get_target_class(): return Page def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_constructor(self): parent = object() @@ -98,7 +98,7 @@ def _get_target_class(): return Iterator def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_constructor(self): connection = _Connection() @@ -241,7 +241,7 @@ def _get_target_class(): return HTTPIterator def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_constructor(self): from google.cloud.iterator import _do_nothing_page_start @@ -474,7 +474,7 @@ def _get_target_class(): return GAXIterator def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_constructor(self): client = _Client(None) diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index d2743f0cf18a..32e791b3798a 100644 --- 
a/packages/google-cloud-core/unit_tests/test_operation.py
+++ b/packages/google-cloud-core/unit_tests/test_operation.py
@@ -113,7 +113,7 @@ def _get_target_class(): return Operation def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): client = _Client()
@@ -146,7 +146,7 @@ def test_from_pb_wo_metadata_or_kw(self): from google.longrunning import operations_pb2 client = _Client() operation_pb = operations_pb2.Operation(name=self.OPERATION_NAME) - klass = self._getTargetClass() + klass = self._get_target_class() operation = klass.from_pb(operation_pb, client)
@@ -169,7 +169,7 @@ def test_from_pb_w_unknown_metadata(self): metadata_pb = Any(type_url=type_url, value=meta.SerializeToString()) operation_pb = operations_pb2.Operation( name=self.OPERATION_NAME, metadata=metadata_pb) - klass = self._getTargetClass() + klass = self._get_target_class() with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): operation = klass.from_pb(operation_pb, client)
@@ -195,7 +195,7 @@ def test_from_pb_w_metadata_and_kwargs(self): metadata_pb = Any(type_url=type_url, value=meta.SerializeToString()) operation_pb = operations_pb2.Operation( name=self.OPERATION_NAME, metadata=metadata_pb) - klass = self._getTargetClass() + klass = self._get_target_class() with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): operation = klass.from_pb(operation_pb, client, baz='qux')
@@ -220,7 +220,7 @@ def test_from_dict(self): } client = _Client() - klass = self._getTargetClass() + klass = self._get_target_class() with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): operation = klass.from_dict(api_response, client)

From 38403e69a59677f036459eccce8cb844cf264520 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Thu, 10 Nov 2016 11:05:35 -0800
Subject: [PATCH 087/468] Changing all instances of _makeOne to _make_one.
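This rename, like the earlier _getTargetClass change, standardizes the helper
pattern shared by all of these test cases: a static _get_target_class() that
returns the class under test, plus a _make_one() factory that instantiates it
with whatever arguments a given test needs. The sketch below is a minimal,
self-contained illustration of that pattern, not code from this repository;
the Widget class is hypothetical and stands in for real targets such as
Download, Upload, or Connection.

    import unittest


    class Widget(object):
        """Hypothetical stand-in for a class under test."""

        def __init__(self, name=None):
            self.name = name


    class TestWidget(unittest.TestCase):

        @staticmethod
        def _get_target_class():
            # The real tests import the target inside this method so that an
            # import error surfaces as a failing test rather than an error at
            # collection time; the hypothetical Widget is defined above instead.
            return Widget

        def _make_one(self, *args, **kw):
            # Formerly spelled _makeOne; every test constructs its instance
            # through this single factory.
            return self._get_target_class()(*args, **kw)

        def test_ctor_defaults(self):
            widget = self._make_one()
            self.assertIsNone(widget.name)

Because construction is funneled through these two helpers, the rename is
purely mechanical and safe to apply with a global search-and-replace.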
Done via: $ git grep -l _makeOne | \ > xargs sed -i s/_makeOne/_make_one/g --- .../streaming/test_buffered_stream.py | 20 +- .../unit_tests/streaming/test_exceptions.py | 8 +- .../unit_tests/streaming/test_http_wrapper.py | 36 +-- .../unit_tests/streaming/test_stream_slice.py | 14 +- .../unit_tests/streaming/test_transfer.py | 248 +++++++++--------- .../unit_tests/test__helpers.py | 24 +- .../unit_tests/test_client.py | 16 +- .../unit_tests/test_connection.py | 36 +-- .../unit_tests/test_exceptions.py | 6 +- .../unit_tests/test_iterator.py | 78 +++--- .../unit_tests/test_operation.py | 26 +- 11 files changed, 256 insertions(+), 256 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py index 64b2066d7e3f..8a8793b0c49c 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py @@ -22,7 +22,7 @@ def _get_target_class(): from google.cloud.streaming.buffered_stream import BufferedStream return BufferedStream - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_closed_stream(self): @@ -31,7 +31,7 @@ class _Stream(object): start = 0 bufsize = 4 - bufstream = self._makeOne(_Stream, start, bufsize) + bufstream = self._make_one(_Stream, start, bufsize) self.assertIs(bufstream._stream, _Stream) self.assertEqual(bufstream._start_pos, start) self.assertEqual(bufstream._buffer_pos, 0) @@ -45,7 +45,7 @@ def test_ctor_start_zero_longer_than_buffer(self): START = 0 BUFSIZE = 4 stream = BytesIO(CONTENT) - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) self.assertIs(bufstream._stream, stream) self.assertEqual(bufstream._start_pos, START) self.assertEqual(bufstream._buffer_pos, 0) @@ -61,7 +61,7 @@ def test_ctor_start_nonzero_shorter_than_buffer(self): BUFSIZE = 10 stream = BytesIO(CONTENT) stream.read(START) # already consumed - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) self.assertIs(bufstream._stream, stream) self.assertEqual(bufstream._start_pos, START) self.assertEqual(bufstream._buffer_pos, 0) @@ -76,7 +76,7 @@ def test__bytes_remaining_start_zero_longer_than_buffer(self): START = 0 BUFSIZE = 4 stream = BytesIO(CONTENT) - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) self.assertEqual(bufstream._bytes_remaining, BUFSIZE) def test__bytes_remaining_start_zero_shorter_than_buffer(self): @@ -86,7 +86,7 @@ def test__bytes_remaining_start_zero_shorter_than_buffer(self): BUFSIZE = 10 stream = BytesIO(CONTENT) stream.read(START) # already consumed - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) self.assertEqual(bufstream._bytes_remaining, len(CONTENT) - START) def test_read_w_none(self): @@ -95,7 +95,7 @@ def test_read_w_none(self): START = 0 BUFSIZE = 4 stream = BytesIO(CONTENT) - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) with self.assertRaises(ValueError): bufstream.read(None) @@ -105,7 +105,7 @@ def test_read_w_negative_size(self): START = 0 BUFSIZE = 4 stream = BytesIO(CONTENT) - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) with self.assertRaises(ValueError): 
bufstream.read(-2) @@ -115,7 +115,7 @@ def test_read_from_start(self): START = 0 BUFSIZE = 4 stream = BytesIO(CONTENT) - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) self.assertEqual(bufstream.read(4), CONTENT[:4]) def test_read_exhausted(self): @@ -125,7 +125,7 @@ def test_read_exhausted(self): BUFSIZE = 10 stream = BytesIO(CONTENT) stream.read(START) # already consumed - bufstream = self._makeOne(stream, START, BUFSIZE) + bufstream = self._make_one(stream, START, BUFSIZE) self.assertTrue(bufstream.stream_exhausted) self.assertEqual(bufstream.stream_end_position, len(CONTENT)) self.assertEqual(bufstream._bytes_remaining, 0) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py index 242490966ba2..0cb1c724bf99 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py @@ -22,14 +22,14 @@ def _get_target_class(): from google.cloud.streaming.exceptions import HttpError return HttpError - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): RESPONSE = {'status': '404'} CONTENT = b'CONTENT' URL = 'http://www.example.com' - exception = self._makeOne(RESPONSE, CONTENT, URL) + exception = self._make_one(RESPONSE, CONTENT, URL) self.assertEqual(exception.response, RESPONSE) self.assertEqual(exception.content, CONTENT) self.assertEqual(exception.url, URL) @@ -64,7 +64,7 @@ def _get_target_class(): from google.cloud.streaming.exceptions import RetryAfterError return RetryAfterError - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): @@ -72,7 +72,7 @@ def test_ctor(self): CONTENT = b'CONTENT' URL = 'http://www.example.com' RETRY_AFTER = 60 - exception = self._makeOne(RESPONSE, CONTENT, URL, RETRY_AFTER) + exception = self._make_one(RESPONSE, CONTENT, URL, RETRY_AFTER) self.assertEqual(exception.response, RESPONSE) self.assertEqual(exception.content, CONTENT) self.assertEqual(exception.url, URL) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py index e430636fe7f1..c0608f5476a0 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py @@ -22,7 +22,7 @@ def _get_target_class(): from google.cloud.streaming.http_wrapper import _httplib2_debug_level return _httplib2_debug_level - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_wo_loggable_body_wo_http(self): @@ -33,7 +33,7 @@ def test_wo_loggable_body_wo_http(self): LEVEL = 1 _httplib2 = _Dummy(debuglevel=0) with _Monkey(MUT, httplib2=_httplib2): - with self._makeOne(request, LEVEL): + with self._make_one(request, LEVEL): self.assertEqual(_httplib2.debuglevel, 0) def test_w_loggable_body_wo_http(self): @@ -44,7 +44,7 @@ def test_w_loggable_body_wo_http(self): LEVEL = 1 _httplib2 = _Dummy(debuglevel=0) with _Monkey(MUT, httplib2=_httplib2): - with self._makeOne(request, LEVEL): + with self._make_one(request, LEVEL): self.assertEqual(_httplib2.debuglevel, LEVEL) self.assertEqual(_httplib2.debuglevel, 0) @@ -66,7 +66,7 @@ def set_debuglevel(self, value): connections = {'update:me': 
update_me, 'skip_me': skip_me} _http = _Dummy(connections=connections) with _Monkey(MUT, httplib2=_httplib2): - with self._makeOne(request, LEVEL, _http): + with self._make_one(request, LEVEL, _http): self.assertEqual(_httplib2.debuglevel, LEVEL) self.assertEqual(update_me.debuglevel, LEVEL) self.assertEqual(skip_me.debuglevel, 0) @@ -82,11 +82,11 @@ def _get_target_class(): from google.cloud.streaming.http_wrapper import Request return Request - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - request = self._makeOne() + request = self._make_one() self.assertEqual(request.url, '') self.assertEqual(request.http_method, 'GET') self.assertEqual(request.headers, {'content-length': '0'}) @@ -95,12 +95,12 @@ def test_ctor_defaults(self): def test_loggable_body_setter_w_body_None(self): from google.cloud.streaming.exceptions import RequestError - request = self._makeOne(body=None) + request = self._make_one(body=None) with self.assertRaises(RequestError): request.loggable_body = 'abc' def test_body_setter_w_None(self): - request = self._makeOne() + request = self._make_one() request.loggable_body = 'abc' request.body = None self.assertEqual(request.headers, {}) @@ -108,7 +108,7 @@ def test_body_setter_w_None(self): self.assertEqual(request.loggable_body, 'abc') def test_body_setter_w_non_string(self): - request = self._makeOne() + request = self._make_one() request.loggable_body = 'abc' request.body = body = _Dummy(length=123) self.assertEqual(request.headers, {'content-length': '123'}) @@ -123,14 +123,14 @@ def _get_target_class(): from google.cloud.streaming.http_wrapper import Response return Response - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): CONTENT = 'CONTENT' URL = 'http://example.com/api' info = {'status': '200'} - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertEqual(len(response), len(CONTENT)) self.assertEqual(response.status_code, 200) self.assertIsNone(response.retry_after) @@ -146,7 +146,7 @@ def test_length_w_content_encoding_w_content_range(self): 'content-encoding': 'testing', 'content-range': RANGE, } - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertEqual(len(response), 123) def test_length_w_content_encoding_wo_content_range(self): @@ -157,7 +157,7 @@ def test_length_w_content_encoding_wo_content_range(self): 'content-length': len(CONTENT), 'content-encoding': 'testing', } - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertEqual(len(response), len(CONTENT)) def test_length_w_content_length_w_content_range(self): @@ -169,7 +169,7 @@ def test_length_w_content_length_w_content_range(self): 'content-length': len(CONTENT) * 2, 'content-range': RANGE, } - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertEqual(len(response), len(CONTENT) * 2) def test_length_wo_content_length_w_content_range(self): @@ -180,7 +180,7 @@ def test_length_wo_content_length_w_content_range(self): 'status': '200', 'content-range': RANGE, } - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertEqual(len(response), 123) def test_retry_after_w_header(self): @@ -190,7 +190,7 @@ def test_retry_after_w_header(self): 'status': '200', 'retry-after': '123', } 
- response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertEqual(response.retry_after, 123) def test_is_redirect_w_code_wo_location(self): @@ -199,7 +199,7 @@ def test_is_redirect_w_code_wo_location(self): info = { 'status': '301', } - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertFalse(response.is_redirect) def test_is_redirect_w_code_w_location(self): @@ -209,7 +209,7 @@ def test_is_redirect_w_code_w_location(self): 'status': '301', 'location': 'http://example.com/other', } - response = self._makeOne(info, CONTENT, URL) + response = self._make_one(info, CONTENT, URL) self.assertTrue(response.is_redirect) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py index 0a5376abad4e..c0c5ff375a96 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py @@ -22,7 +22,7 @@ def _get_target_class(): from google.cloud.streaming.stream_slice import StreamSlice return StreamSlice - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): @@ -30,7 +30,7 @@ def test_ctor(self): CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 stream = BytesIO(CONTENT) - stream_slice = self._makeOne(stream, MAXSIZE) + stream_slice = self._make_one(stream, MAXSIZE) self.assertIs(stream_slice._stream, stream) self.assertEqual(stream_slice._remaining_bytes, MAXSIZE) self.assertEqual(stream_slice._max_bytes, MAXSIZE) @@ -42,7 +42,7 @@ def test___nonzero___empty(self): CONTENT = b'' MAXSIZE = 0 stream = BytesIO(CONTENT) - stream_slice = self._makeOne(stream, MAXSIZE) + stream_slice = self._make_one(stream, MAXSIZE) self.assertFalse(stream_slice) def test___nonzero___nonempty(self): @@ -50,7 +50,7 @@ def test___nonzero___nonempty(self): CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 stream = BytesIO(CONTENT) - stream_slice = self._makeOne(stream, MAXSIZE) + stream_slice = self._make_one(stream, MAXSIZE) self.assertTrue(stream_slice) def test_read_exhausted(self): @@ -59,7 +59,7 @@ def test_read_exhausted(self): CONTENT = b'' MAXSIZE = 4 stream = BytesIO(CONTENT) - stream_slice = self._makeOne(stream, MAXSIZE) + stream_slice = self._make_one(stream, MAXSIZE) with self.assertRaises(http_client.IncompleteRead): stream_slice.read() @@ -68,7 +68,7 @@ def test_read_implicit_size(self): CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 stream = BytesIO(CONTENT) - stream_slice = self._makeOne(stream, MAXSIZE) + stream_slice = self._make_one(stream, MAXSIZE) self.assertEqual(stream_slice.read(), CONTENT[:MAXSIZE]) self.assertEqual(stream_slice._remaining_bytes, 0) @@ -78,6 +78,6 @@ def test_read_explicit_size(self): MAXSIZE = 4 SIZE = 3 stream = BytesIO(CONTENT) - stream_slice = self._makeOne(stream, MAXSIZE) + stream_slice = self._make_one(stream, MAXSIZE) self.assertEqual(stream_slice.read(SIZE), CONTENT[:SIZE]) self.assertEqual(stream_slice._remaining_bytes, MAXSIZE - SIZE) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index 86b48fbf01ef..9f2fb6764cec 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -23,13 +23,13 @@ def _get_target_class(): from google.cloud.streaming.transfer 
import _Transfer return _Transfer - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) self.assertIs(xfer.stream, stream) self.assertFalse(xfer.close_stream) self.assertEqual(xfer.chunksize, _DEFAULT_CHUNKSIZE) @@ -45,7 +45,7 @@ def test_ctor_explicit(self): HTTP = object() CHUNK_SIZE = 1 << 18 NUM_RETRIES = 8 - xfer = self._makeOne(stream, + xfer = self._make_one(stream, close_stream=True, chunksize=CHUNK_SIZE, auto_transfer=False, @@ -62,33 +62,33 @@ def test_ctor_explicit(self): def test_bytes_http_fallback_to_http(self): stream = _Stream() HTTP = object() - xfer = self._makeOne(stream, http=HTTP) + xfer = self._make_one(stream, http=HTTP) self.assertIs(xfer.bytes_http, HTTP) def test_bytes_http_setter(self): stream = _Stream() HTTP = object() BYTES_HTTP = object() - xfer = self._makeOne(stream, http=HTTP) + xfer = self._make_one(stream, http=HTTP) xfer.bytes_http = BYTES_HTTP self.assertIs(xfer.bytes_http, BYTES_HTTP) def test_num_retries_setter_invalid(self): stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) with self.assertRaises(ValueError): xfer.num_retries = object() def test_num_retries_setter_negative(self): stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) with self.assertRaises(ValueError): xfer.num_retries = -1 def test__initialize_not_already_initialized_w_http(self): HTTP = object() stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) xfer._initialize(HTTP, self.URL) self.assertTrue(xfer.initialized) self.assertIs(xfer.http, HTTP) @@ -97,7 +97,7 @@ def test__initialize_not_already_initialized_w_http(self): def test__initialize_not_already_initialized_wo_http(self): from httplib2 import Http stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) xfer._initialize(None, self.URL) self.assertTrue(xfer.initialized) self.assertIsInstance(xfer.http, Http) @@ -106,7 +106,7 @@ def test__initialize_not_already_initialized_wo_http(self): def test__initialize_w_existing_http(self): HTTP_1, HTTP_2 = object(), object() stream = _Stream() - xfer = self._makeOne(stream, http=HTTP_1) + xfer = self._make_one(stream, http=HTTP_1) xfer._initialize(HTTP_2, self.URL) self.assertTrue(xfer.initialized) self.assertIs(xfer.http, HTTP_1) @@ -117,7 +117,7 @@ def test__initialize_already_initialized(self): URL_2 = 'http://example.com/other' HTTP_1, HTTP_2 = object(), object() stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) xfer._initialize(HTTP_1, self.URL) with self.assertRaises(TransferInvalidError): xfer._initialize(HTTP_2, URL_2) @@ -125,27 +125,27 @@ def test__initialize_already_initialized(self): def test__ensure_initialized_hit(self): HTTP = object() stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) xfer._initialize(HTTP, self.URL) xfer._ensure_initialized() # no raise def test__ensure_initialized_miss(self): from google.cloud.streaming.exceptions import TransferInvalidError stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) with self.assertRaises(TransferInvalidError): xfer._ensure_initialized() def test__ensure_uninitialized_hit(self): stream = _Stream() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) xfer._ensure_uninitialized() # 
no raise def test__ensure_uninitialized_miss(self): from google.cloud.streaming.exceptions import TransferInvalidError stream = _Stream() HTTP = object() - xfer = self._makeOne(stream) + xfer = self._make_one(stream) xfer._initialize(HTTP, self.URL) with self.assertRaises(TransferInvalidError): xfer._ensure_uninitialized() @@ -153,7 +153,7 @@ def test__ensure_uninitialized_miss(self): def test___del___closes_stream(self): stream = _Stream() - xfer = self._makeOne(stream, close_stream=True) + xfer = self._make_one(stream, close_stream=True) self.assertFalse(stream._closed) del xfer @@ -168,12 +168,12 @@ def _get_target_class(): from google.cloud.streaming.transfer import Download return Download - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): stream = _Stream() - download = self._makeOne(stream) + download = self._make_one(stream) self.assertIs(download.stream, stream) self.assertIsNone(download._initial_response) self.assertEqual(download.progress, 0) @@ -183,14 +183,14 @@ def test_ctor_defaults(self): def test_ctor_w_kwds(self): stream = _Stream() CHUNK_SIZE = 123 - download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download = self._make_one(stream, chunksize=CHUNK_SIZE) self.assertIs(download.stream, stream) self.assertEqual(download.chunksize, CHUNK_SIZE) def test_ctor_w_total_size(self): stream = _Stream() SIZE = 123 - download = self._makeOne(stream, total_size=SIZE) + download = self._make_one(stream, total_size=SIZE) self.assertIs(download.stream, stream) self.assertEqual(download.total_size, SIZE) @@ -240,7 +240,7 @@ def test_from_stream_explicit(self): def test_configure_request(self): CHUNK_SIZE = 100 - download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) request = _Dummy(headers={}) url_builder = _Dummy(query_params={}) download.configure_request(request, url_builder) @@ -249,34 +249,34 @@ def test_configure_request(self): def test__set_total_wo_content_range_wo_existing_total(self): info = {} - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total(info) self.assertEqual(download.total_size, 0) def test__set_total_wo_content_range_w_existing_total(self): SIZE = 123 info = {} - download = self._makeOne(_Stream(), total_size=SIZE) + download = self._make_one(_Stream(), total_size=SIZE) download._set_total(info) self.assertEqual(download.total_size, SIZE) def test__set_total_w_content_range_w_existing_total(self): SIZE = 123 info = {'content-range': 'bytes 123-234/4567'} - download = self._makeOne(_Stream(), total_size=SIZE) + download = self._make_one(_Stream(), total_size=SIZE) download._set_total(info) self.assertEqual(download.total_size, 4567) def test__set_total_w_content_range_w_asterisk_total(self): info = {'content-range': 'bytes 123-234/*'} - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total(info) self.assertEqual(download.total_size, 0) def test_initialize_download_already_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError request = _Request() - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._initialize(None, self.URL) with self.assertRaises(TransferInvalidError): download.initialize_download(request, http=object()) @@ -284,7 +284,7 @@ def test_initialize_download_already_initialized(self): def test_initialize_download_wo_autotransfer(self): request 
= _Request() http = object() - download = self._makeOne(_Stream(), auto_transfer=False) + download = self._make_one(_Stream(), auto_transfer=False) download.initialize_download(request, http) self.assertIs(download.http, http) self.assertEqual(download.url, request.url) @@ -296,7 +296,7 @@ def test_initialize_download_w_autotransfer_failing(self): from google.cloud.streaming.exceptions import HttpError request = _Request() http = object() - download = self._makeOne(_Stream(), auto_transfer=True) + download = self._make_one(_Stream(), auto_transfer=True) response = _makeResponse(http_client.BAD_REQUEST) requester = _MakeRequest(response) @@ -316,7 +316,7 @@ def test_initialize_download_w_autotransfer_w_content_location(self): request = _Request() http = object() info = {'content-location': REDIRECT_URL} - download = self._makeOne(_Stream(), auto_transfer=True) + download = self._make_one(_Stream(), auto_transfer=True) response = _makeResponse(http_client.NO_CONTENT, info) requester = _MakeRequest(response) @@ -333,14 +333,14 @@ def test_initialize_download_w_autotransfer_w_content_location(self): def test__normalize_start_end_w_end_w_start_lt_0(self): from google.cloud.streaming.exceptions import TransferInvalidError - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): download._normalize_start_end(-1, 0) def test__normalize_start_end_w_end_w_start_gt_total(self): from google.cloud.streaming.exceptions import TransferInvalidError - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/2'}) with self.assertRaises(TransferInvalidError): @@ -348,65 +348,65 @@ def test__normalize_start_end_w_end_w_start_gt_total(self): def test__normalize_start_end_w_end_lt_start(self): from google.cloud.streaming.exceptions import TransferInvalidError - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/2'}) with self.assertRaises(TransferInvalidError): download._normalize_start_end(1, 0) def test__normalize_start_end_w_end_gt_start(self): - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/2'}) self.assertEqual(download._normalize_start_end(1, 2), (1, 1)) def test__normalize_start_end_wo_end_w_start_lt_0(self): - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/2'}) self.assertEqual(download._normalize_start_end(-2), (0, 1)) self.assertEqual(download._normalize_start_end(-1), (1, 1)) def test__normalize_start_end_wo_end_w_start_ge_0(self): - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/100'}) self.assertEqual(download._normalize_start_end(0), (0, 99)) self.assertEqual(download._normalize_start_end(1), (1, 99)) def test__set_range_header_w_start_lt_0(self): request = _Request() - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_range_header(request, -1) self.assertEqual(request.headers['range'], 'bytes=-1') def test__set_range_header_w_start_ge_0_wo_end(self): request = _Request() - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_range_header(request, 0) self.assertEqual(request.headers['range'], 'bytes=0-') def test__set_range_header_w_start_ge_0_w_end(self): request = _Request() - download = 
self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._set_range_header(request, 0, 1) self.assertEqual(request.headers['range'], 'bytes=0-1') def test__compute_end_byte_w_start_lt_0_w_end(self): - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) self.assertEqual(download._compute_end_byte(-1, 1), 1) def test__compute_end_byte_w_start_ge_0_wo_end_w_use_chunks(self): CHUNK_SIZE = 5 - download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) self.assertEqual(download._compute_end_byte(0, use_chunks=True), 4) def test__compute_end_byte_w_start_ge_0_w_end_w_use_chunks(self): CHUNK_SIZE = 5 - download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) self.assertEqual(download._compute_end_byte(0, 3, use_chunks=True), 3) self.assertEqual(download._compute_end_byte(0, 5, use_chunks=True), 4) def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self): CHUNK_SIZE = 50 - download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) download._set_total({'content-range': 'bytes 0-1/10'}) self.assertEqual(download._compute_end_byte(0, 100, use_chunks=False), 9) @@ -414,13 +414,13 @@ def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self): def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self): CHUNK_SIZE = 50 - download = self._makeOne(_Stream(), chunksize=CHUNK_SIZE) + download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) download._set_total({'content-range': 'bytes 0-1/10'}) self.assertEqual(download._compute_end_byte(0, use_chunks=False), 9) def test__get_chunk_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): download._get_chunk(0, 10) @@ -430,7 +430,7 @@ def test__get_chunk(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT http = object() - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) download._initialize(http, self.URL) response = _makeResponse(http_client.OK) requester = _MakeRequest(response) @@ -448,7 +448,7 @@ def test__get_chunk(self): def test__process_response_w_FORBIDDEN(self): from google.cloud.streaming.exceptions import HttpError from six.moves import http_client - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) response = _makeResponse(http_client.FORBIDDEN) with self.assertRaises(HttpError): download._process_response(response) @@ -456,7 +456,7 @@ def test__process_response_w_FORBIDDEN(self): def test__process_response_w_NOT_FOUND(self): from google.cloud.streaming.exceptions import HttpError from six.moves import http_client - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) response = _makeResponse(http_client.NOT_FOUND) with self.assertRaises(HttpError): download._process_response(response) @@ -464,7 +464,7 @@ def test__process_response_w_NOT_FOUND(self): def test__process_response_w_other_error(self): from google.cloud.streaming.exceptions import TransferRetryError from six.moves import http_client - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) response = _makeResponse(http_client.BAD_REQUEST) with self.assertRaises(TransferRetryError): download._process_response(response) @@ -472,7 +472,7 @@ def 
test__process_response_w_other_error(self): def test__process_response_w_OK_wo_encoding(self): from six.moves import http_client stream = _Stream() - download = self._makeOne(stream) + download = self._make_one(stream) response = _makeResponse(http_client.OK, content='OK') found = download._process_response(response) self.assertIs(found, response) @@ -483,7 +483,7 @@ def test__process_response_w_OK_wo_encoding(self): def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): from six.moves import http_client stream = _Stream() - download = self._makeOne(stream) + download = self._make_one(stream) info = {'content-encoding': 'blah'} response = _makeResponse(http_client.OK, info, 'PARTIAL') found = download._process_response(response) @@ -495,7 +495,7 @@ def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): from six.moves import http_client stream = _Stream() - download = self._makeOne(stream) + download = self._make_one(stream) response = _makeResponse( http_client.REQUESTED_RANGE_NOT_SATISFIABLE) found = download._process_response(response) @@ -507,7 +507,7 @@ def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): def test__process_response_w_NO_CONTENT(self): from six.moves import http_client stream = _Stream() - download = self._makeOne(stream) + download = self._make_one(stream) response = _makeResponse(status_code=http_client.NO_CONTENT) found = download._process_response(response) self.assertIs(found, response) @@ -517,7 +517,7 @@ def test__process_response_w_NO_CONTENT(self): def test_get_range_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): download.get_range(0, 10) @@ -531,7 +531,7 @@ def test_get_range_wo_total_size_complete(self): RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN) http = object() stream = _Stream() - download = self._makeOne(stream) + download = self._make_one(stream) download._initialize(http, self.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) @@ -560,7 +560,7 @@ def test_get_range_wo_total_size_wo_end(self): RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) http = object() stream = _Stream() - download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download = self._make_one(stream, chunksize=CHUNK_SIZE) download._initialize(http, self.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT[START:]) @@ -588,7 +588,7 @@ def test_get_range_w_total_size_partial(self): RESP_RANGE = 'bytes 0-%d/%d' % (PARTIAL_LEN, LEN,) http = object() stream = _Stream() - download = self._makeOne(stream, total_size=LEN) + download = self._make_one(stream, total_size=LEN) download._initialize(http, self.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT[:PARTIAL_LEN]) @@ -619,7 +619,7 @@ def test_get_range_w_empty_chunk(self): RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) http = object() stream = _Stream() - download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download = self._make_one(stream, chunksize=CHUNK_SIZE) download._initialize(http, self.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info) @@ -648,7 +648,7 @@ def test_get_range_w_total_size_wo_use_chunks(self): RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) http = object() stream = 
_Stream() - download = self._makeOne(stream, total_size=LEN, chunksize=CHUNK_SIZE) + download = self._make_one(stream, total_size=LEN, chunksize=CHUNK_SIZE) download._initialize(http, self.URL) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) @@ -678,7 +678,7 @@ def test_get_range_w_multiple_chunks(self): RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN) http = object() stream = _Stream() - download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download = self._make_one(stream, chunksize=CHUNK_SIZE) download._initialize(http, self.URL) info_1 = {'content-range': RESP_RANGE_1} response_1 = _makeResponse(http_client.PARTIAL_CONTENT, info_1, @@ -703,7 +703,7 @@ def test_get_range_w_multiple_chunks(self): def test_stream_file_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError - download = self._makeOne(_Stream()) + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): download.stream_file() @@ -714,7 +714,7 @@ def test_stream_file_w_initial_response_complete(self): LEN = len(CONTENT) RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) stream = _Stream() - download = self._makeOne(stream, total_size=LEN) + download = self._make_one(stream, total_size=LEN) info = {'content-range': RESP_RANGE} download._initial_response = _makeResponse( http_client.OK, info, CONTENT) @@ -738,7 +738,7 @@ def test_stream_file_w_initial_response_incomplete(self): RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN,) stream = _Stream() http = object() - download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download = self._make_one(stream, chunksize=CHUNK_SIZE) info_1 = {'content-range': RESP_RANGE_1} download._initial_response = _makeResponse( http_client.PARTIAL_CONTENT, info_1, CONTENT[:CHUNK_SIZE]) @@ -774,7 +774,7 @@ def test_stream_file_wo_initial_response_wo_total_size(self): RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) stream = _Stream() http = object() - download = self._makeOne(stream, chunksize=CHUNK_SIZE) + download = self._make_one(stream, chunksize=CHUNK_SIZE) info = {'content-range': RESP_RANGE} response = _makeResponse(http_client.OK, info, CONTENT) requester = _MakeRequest(response) @@ -804,13 +804,13 @@ def _get_target_class(): from google.cloud.streaming.transfer import Upload return Upload - def _makeOne(self, stream, mime_type=MIME_TYPE, *args, **kw): + def _make_one(self, stream, mime_type=MIME_TYPE, *args, **kw): return self._get_target_class()(stream, mime_type, *args, **kw) def test_ctor_defaults(self): from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE stream = _Stream() - upload = self._makeOne(stream) + upload = self._make_one(stream) self.assertIs(upload.stream, stream) self.assertIsNone(upload._final_response) self.assertIsNone(upload._server_chunk_granularity) @@ -824,7 +824,7 @@ def test_ctor_defaults(self): def test_ctor_w_kwds(self): stream = _Stream() CHUNK_SIZE = 123 - upload = self._makeOne(stream, chunksize=CHUNK_SIZE) + upload = self._make_one(stream, chunksize=CHUNK_SIZE) self.assertIs(upload.stream, stream) self.assertEqual(upload.mime_type, self.MIME_TYPE) self.assertEqual(upload.chunksize, CHUNK_SIZE) @@ -911,7 +911,7 @@ def test_from_stream_explicit(self): self.assertEqual(upload.chunksize, CHUNK_SIZE) def test_strategy_setter_invalid(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) with self.assertRaises(ValueError): upload.strategy = object() with self.assertRaises(ValueError): @@ -919,20 +919,20 @@ 
def test_strategy_setter_invalid(self): def test_strategy_setter_SIMPLE_UPLOAD(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD self.assertEqual(upload.strategy, SIMPLE_UPLOAD) def test_strategy_setter_RESUMABLE_UPLOAD(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) def test_total_size_setter_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError SIZE = 123 - upload = self._makeOne(_Stream) + upload = self._make_one(_Stream) http = object() upload._initialize(http, _Request.URL) with self.assertRaises(TransferInvalidError): @@ -940,7 +940,7 @@ def test_total_size_setter_initialized(self): def test_total_size_setter_not_initialized(self): SIZE = 123 - upload = self._makeOne(_Stream) + upload = self._make_one(_Stream) upload.total_size = SIZE self.assertEqual(upload.total_size, SIZE) @@ -952,7 +952,7 @@ def test__set_default_strategy_w_existing_strategy(self): simple_path='/upload/endpoint', ) request = _Request() - upload = self._makeOne(_Stream) + upload = self._make_one(_Stream) upload.strategy = RESUMABLE_UPLOAD upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) @@ -965,7 +965,7 @@ def test__set_default_strategy_wo_resumable_path(self): simple_path='/upload/endpoint', ) request = _Request() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, SIMPLE_UPLOAD) @@ -974,7 +974,7 @@ def test__set_default_strategy_w_total_size_gt_threshhold(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD config = _UploadConfig() request = _Request() - upload = self._makeOne( + upload = self._make_one( _Stream(), total_size=RESUMABLE_UPLOAD_THRESHOLD + 1) upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) @@ -985,7 +985,7 @@ def test__set_default_strategy_w_body_wo_multipart(self): config = _UploadConfig() config.simple_multipart = False request = _Request(body=CONTENT) - upload = self._makeOne(_Stream(), total_size=len(CONTENT)) + upload = self._make_one(_Stream(), total_size=len(CONTENT)) upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) @@ -995,7 +995,7 @@ def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): config = _UploadConfig() config.simple_path = None request = _Request(body=CONTENT) - upload = self._makeOne(_Stream(), total_size=len(CONTENT)) + upload = self._make_one(_Stream(), total_size=len(CONTENT)) upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) @@ -1004,7 +1004,7 @@ def test__set_default_strategy_w_body_w_multipart_w_simple_path(self): CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() request = _Request(body=CONTENT) - upload = self._makeOne(_Stream(), total_size=len(CONTENT)) + upload = self._make_one(_Stream(), total_size=len(CONTENT)) upload._set_default_strategy(config, request) self.assertEqual(upload.strategy, SIMPLE_UPLOAD) @@ -1014,7 +1014,7 @@ def test_configure_request_w_total_size_gt_max_size(self): config.max_size = MAX_SIZE request = _Request() url_builder = _Dummy() - upload = self._makeOne(_Stream(), total_size=MAX_SIZE + 1) + 
upload = self._make_one(_Stream(), total_size=MAX_SIZE + 1) with self.assertRaises(ValueError): upload.configure_request(config, request, url_builder) @@ -1023,7 +1023,7 @@ def test_configure_request_w_invalid_mimetype(self): config.accept = ('text/*',) request = _Request() url_builder = _Dummy() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) with self.assertRaises(ValueError): upload.configure_request(config, request, url_builder) @@ -1033,7 +1033,7 @@ def test_configure_request_w_simple_wo_body(self): config = _UploadConfig() request = _Request() url_builder = _Dummy(query_params={}) - upload = self._makeOne(_Stream(CONTENT)) + upload = self._make_one(_Stream(CONTENT)) upload.strategy = SIMPLE_UPLOAD upload.configure_request(config, request, url_builder) @@ -1054,7 +1054,7 @@ def test_configure_request_w_simple_w_body(self): request = _Request(body=BODY) request.headers['content-type'] = 'text/plain' url_builder = _Dummy(query_params={}) - upload = self._makeOne(_Stream(CONTENT)) + upload = self._make_one(_Stream(CONTENT)) upload.strategy = SIMPLE_UPLOAD upload.configure_request(config, request, url_builder) @@ -1094,7 +1094,7 @@ def test_configure_request_w_resumable_wo_total_size(self): config = _UploadConfig() request = _Request() url_builder = _Dummy(query_params={}) - upload = self._makeOne(_Stream(CONTENT)) + upload = self._make_one(_Stream(CONTENT)) upload.strategy = RESUMABLE_UPLOAD upload.configure_request(config, request, url_builder) @@ -1112,7 +1112,7 @@ def test_configure_request_w_resumable_w_total_size(self): config = _UploadConfig() request = _Request() url_builder = _Dummy(query_params={}) - upload = self._makeOne(_Stream(CONTENT)) + upload = self._make_one(_Stream(CONTENT)) upload.total_size = LEN upload.strategy = RESUMABLE_UPLOAD @@ -1127,14 +1127,14 @@ def test_configure_request_w_resumable_w_total_size(self): def test_refresh_upload_state_w_simple_strategy(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD upload.refresh_upload_state() # no-op def test_refresh_upload_state_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD with self.assertRaises(TransferInvalidError): upload.refresh_upload_state() @@ -1149,7 +1149,7 @@ def test_refresh_upload_state_w_OK(self): RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=LEN) + upload = self._make_one(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD upload._initialize(http, _Request.URL) info = {'content-range': RESP_RANGE} @@ -1176,7 +1176,7 @@ def test_refresh_upload_state_w_CREATED(self): RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=LEN) + upload = self._make_one(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD upload._initialize(http, _Request.URL) info = {'content-range': RESP_RANGE} @@ -1203,7 +1203,7 @@ def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): LAST = 5 http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=LEN) + upload = self._make_one(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD upload._initialize(http, _Request.URL) info = {'range': 
'0-%d' % (LAST - 1,)} @@ -1229,7 +1229,7 @@ def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): LEN = len(CONTENT) http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=LEN) + upload = self._make_one(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD upload._initialize(http, _Request.URL) response = _makeResponse(RESUME_INCOMPLETE, content=CONTENT) @@ -1255,7 +1255,7 @@ def test_refresh_upload_state_w_error(self): LEN = len(CONTENT) http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=LEN) + upload = self._make_one(stream, total_size=LEN) upload.strategy = RESUMABLE_UPLOAD upload._initialize(http, _Request.URL) response = _makeResponse(http_client.FORBIDDEN) @@ -1268,30 +1268,30 @@ def test_refresh_upload_state_w_error(self): upload.refresh_upload_state() def test__get_range_header_miss(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) response = _makeResponse(None) self.assertIsNone(upload._get_range_header(response)) def test__get_range_header_w_Range(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) response = _makeResponse(None, {'Range': '123'}) self.assertEqual(upload._get_range_header(response), '123') def test__get_range_header_w_range(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) response = _makeResponse(None, {'range': '123'}) self.assertEqual(upload._get_range_header(response), '123') def test_initialize_upload_no_strategy(self): request = _Request() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) with self.assertRaises(ValueError): upload.initialize_upload(request, http=object()) def test_initialize_upload_simple_w_http(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD request = _Request() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD upload.initialize_upload(request, http=object()) # no-op @@ -1299,7 +1299,7 @@ def test_initialize_upload_resumable_already_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD request = _Request() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD upload._initialize(None, self.URL) with self.assertRaises(TransferInvalidError): @@ -1312,7 +1312,7 @@ def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): from google.cloud.streaming.exceptions import HttpError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD request = _Request() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD response = _makeResponse(http_client.FORBIDDEN) requester = _MakeRequest(response) @@ -1327,7 +1327,7 @@ def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): from google.cloud.streaming import transfer as MUT from google.cloud.streaming.transfer import RESUMABLE_UPLOAD request = _Request() - upload = self._makeOne(_Stream(), auto_transfer=False) + upload = self._make_one(_Stream(), auto_transfer=False) upload.strategy = RESUMABLE_UPLOAD info = {'location': self.UPLOAD_URL} response = _makeResponse(http_client.OK, info) @@ -1350,7 +1350,7 @@ def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): CONTENT = b'ABCDEFGHIJ' http = object() request = _Request() - upload = self._makeOne(_Stream(CONTENT), chunksize=1000) + upload = self._make_one(_Stream(CONTENT), 
chunksize=1000) upload.strategy = RESUMABLE_UPLOAD info = {'X-Goog-Upload-Chunk-Granularity': '100', 'location': self.UPLOAD_URL} @@ -1375,34 +1375,34 @@ def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): self.assertEqual(chunk_request.body, CONTENT) def test__last_byte(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) self.assertEqual(upload._last_byte('123-456'), 456) def test__validate_chunksize_wo__server_chunk_granularity(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload._validate_chunksize(123) # no-op def test__validate_chunksize_w__server_chunk_granularity_miss(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload._server_chunk_granularity = 100 with self.assertRaises(ValueError): upload._validate_chunksize(123) def test__validate_chunksize_w__server_chunk_granularity_hit(self): - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload._server_chunk_granularity = 100 upload._validate_chunksize(400) def test_stream_file_w_simple_strategy(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD with self.assertRaises(ValueError): upload.stream_file() def test_stream_file_w_use_chunks_invalid_chunk_size(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - upload = self._makeOne(_Stream(), chunksize=1024) + upload = self._make_one(_Stream(), chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 100 with self.assertRaises(ValueError): @@ -1411,7 +1411,7 @@ def test_stream_file_w_use_chunks_invalid_chunk_size(self): def test_stream_file_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - upload = self._makeOne(_Stream(), chunksize=1024) + upload = self._make_one(_Stream(), chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 with self.assertRaises(TransferInvalidError): @@ -1422,7 +1422,7 @@ def test_stream_file_already_complete_w_unseekable_stream(self): http = object() stream = object() response = object() - upload = self._makeOne(stream, chunksize=1024) + upload = self._make_one(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 upload._initialize(http, _Request.URL) @@ -1437,7 +1437,7 @@ def test_stream_file_already_complete_w_seekable_stream_unsynced(self): http = object() stream = _Stream(CONTENT) response = object() - upload = self._makeOne(stream, chunksize=1024) + upload = self._make_one(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 upload._initialize(http, _Request.URL) @@ -1454,7 +1454,7 @@ def test_stream_file_already_complete_wo_seekable_method_synced(self): stream = _Stream(CONTENT) stream.seek(0, os.SEEK_END) response = object() - upload = self._makeOne(stream, chunksize=1024) + upload = self._make_one(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 upload._initialize(http, _Request.URL) @@ -1470,7 +1470,7 @@ def test_stream_file_already_complete_w_seekable_method_true_synced(self): stream = _StreamWithSeekableMethod(CONTENT, True) stream.seek(0, os.SEEK_END) response = object() - upload = self._makeOne(stream, chunksize=1024) + upload = self._make_one(stream, chunksize=1024) upload.strategy = 
RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 upload._initialize(http, _Request.URL) @@ -1486,7 +1486,7 @@ def test_stream_file_already_complete_w_seekable_method_false(self): stream = _StreamWithSeekableMethod(CONTENT, False) stream.seek(0, os.SEEK_END) response = object() - upload = self._makeOne(stream, chunksize=1024) + upload = self._make_one(stream, chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 upload._initialize(http, _Request.URL) @@ -1503,7 +1503,7 @@ def test_stream_file_incomplete(self): CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) - upload = self._makeOne(stream, chunksize=6) + upload = self._make_one(stream, chunksize=6) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 6 upload._initialize(http, self.UPLOAD_URL) @@ -1548,7 +1548,7 @@ def test_stream_file_incomplete_w_transfer_error(self): CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) - upload = self._makeOne(stream, chunksize=6) + upload = self._make_one(stream, chunksize=6) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 6 upload._initialize(http, self.UPLOAD_URL) @@ -1584,7 +1584,7 @@ def test__send_media_request_wo_error(self): CONTENT = b'ABCDEFGHIJ' bytes_http = object() stream = _Stream(CONTENT) - upload = self._makeOne(stream) + upload = self._make_one(stream) upload.bytes_http = bytes_http headers = {'Content-Range': 'bytes 0-9/10', @@ -1615,7 +1615,7 @@ def test__send_media_request_w_error(self): bytes_http = object() http = object() stream = _Stream(CONTENT) - upload = self._makeOne(stream) + upload = self._make_one(stream) upload.strategy = RESUMABLE_UPLOAD upload._initialize(http, self.UPLOAD_URL) upload.bytes_http = bytes_http @@ -1647,14 +1647,14 @@ def test__send_media_request_w_error(self): def test__send_media_body_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): upload._send_media_body(0) def test__send_media_body_wo_total_size(self): from google.cloud.streaming.exceptions import TransferInvalidError http = object() - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) upload._initialize(http, _Request.URL) with self.assertRaises(TransferInvalidError): upload._send_media_body(0) @@ -1664,7 +1664,7 @@ def test__send_media_body_start_lt_total_size(self): SIZE = 1234 http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=SIZE) + upload = self._make_one(stream, total_size=SIZE) upload._initialize(http, self.UPLOAD_URL) response = object() streamer = _MediaStreamer(response) @@ -1691,7 +1691,7 @@ def test__send_media_body_start_eq_total_size(self): SIZE = 1234 http = object() stream = _Stream() - upload = self._makeOne(stream, total_size=SIZE) + upload = self._make_one(stream, total_size=SIZE) upload._initialize(http, self.UPLOAD_URL) response = object() streamer = _MediaStreamer(response) @@ -1715,7 +1715,7 @@ def test__send_media_body_start_eq_total_size(self): def test__send_chunk_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError - upload = self._makeOne(_Stream()) + upload = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): upload._send_chunk(0) @@ -1723,7 +1723,7 @@ def test__send_chunk_wo_total_size_stream_exhausted(self): CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) http = object() - upload = 
self._makeOne(_Stream(CONTENT), chunksize=1000) + upload = self._make_one(_Stream(CONTENT), chunksize=1000) upload._initialize(http, self.UPLOAD_URL) response = object() streamer = _MediaStreamer(response) @@ -1749,7 +1749,7 @@ def test__send_chunk_wo_total_size_stream_not_exhausted(self): SIZE = len(CONTENT) CHUNK_SIZE = SIZE - 5 http = object() - upload = self._makeOne(_Stream(CONTENT), chunksize=CHUNK_SIZE) + upload = self._make_one(_Stream(CONTENT), chunksize=CHUNK_SIZE) upload._initialize(http, self.UPLOAD_URL) response = object() streamer = _MediaStreamer(response) @@ -1779,7 +1779,7 @@ def test__send_chunk_w_total_size_stream_not_exhausted(self): CHUNK_SIZE = SIZE - 5 http = object() stream = _Stream(CONTENT) - upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) + upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) upload._initialize(http, self.UPLOAD_URL) response = object() streamer = _MediaStreamer(response) @@ -1810,7 +1810,7 @@ def test__send_chunk_w_total_size_stream_exhausted(self): CHUNK_SIZE = 1000 http = object() stream = _Stream(CONTENT) - upload = self._makeOne(stream, total_size=SIZE, chunksize=CHUNK_SIZE) + upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) upload._initialize(http, self.UPLOAD_URL) response = object() streamer = _MediaStreamer(response) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index fc64c1eb547d..c6760cfc8e6a 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -24,12 +24,12 @@ def _get_target_class(): return _LocalStack - def _makeOne(self): + def _make_one(self): return self._get_target_class()() def test_it(self): batch1, batch2 = object(), object() - batches = self._makeOne() + batches = self._make_one() self.assertEqual(list(batches), []) self.assertIsNone(batches.top) batches.push(batch1) @@ -52,7 +52,7 @@ def _get_target_class(): from google.cloud._helpers import _UTC return _UTC - def _makeOne(self): + def _make_one(self): return self._get_target_class()() def test_module_property(self): @@ -68,7 +68,7 @@ def test_module_property(self): def test_dst(self): import datetime - tz = self._makeOne() + tz = self._make_one() self.assertEqual(tz.dst(None), datetime.timedelta(0)) def test_fromutc(self): @@ -76,26 +76,26 @@ def test_fromutc(self): naive_epoch = datetime.datetime.utcfromtimestamp(0) self.assertIsNone(naive_epoch.tzinfo) - tz = self._makeOne() + tz = self._make_one() epoch = tz.fromutc(naive_epoch) self.assertEqual(epoch.tzinfo, tz) def test_tzname(self): - tz = self._makeOne() + tz = self._make_one() self.assertEqual(tz.tzname(None), 'UTC') def test_utcoffset(self): import datetime - tz = self._makeOne() + tz = self._make_one() self.assertEqual(tz.utcoffset(None), datetime.timedelta(0)) def test___repr__(self): - tz = self._makeOne() + tz = self._make_one() self.assertEqual(repr(tz), '') def test___str__(self): - tz = self._makeOne() + tz = self._make_one() self.assertEqual(str(tz), 'UTC') @@ -912,12 +912,12 @@ def _get_target_class(): from google.cloud._helpers import MetadataPlugin return MetadataPlugin - def _makeOne(self, *args, **kwargs): + def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def test_constructor(self): credentials = object() - plugin = self._makeOne(credentials) + plugin = self._make_one(credentials) self.assertIs(plugin._credentials, credentials) def 
test___call__(self): @@ -928,7 +928,7 @@ def test___call__(self): def callback(*args): callback_args.append(args) - transformer = self._makeOne(credentials) + transformer = self._make_one(credentials) result = transformer(None, callback) cb_headers = [ ('authorization', 'Bearer ' + access_token_expected), diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index 6dc92b4f625f..d7638dff2330 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -43,7 +43,7 @@ def _get_target_class(): from google.cloud.client import Client return Client - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): @@ -58,7 +58,7 @@ def mock_get_credentials(): return CREDENTIALS with _Monkey(client, get_credentials=mock_get_credentials): - client_obj = self._makeOne() + client_obj = self._make_one() self.assertIsInstance(client_obj.connection, _MockConnection) self.assertIs(client_obj.connection.credentials, CREDENTIALS) @@ -67,7 +67,7 @@ def mock_get_credentials(): def test_ctor_explicit(self): CREDENTIALS = object() HTTP = object() - client_obj = self._makeOne(credentials=CREDENTIALS, http=HTTP) + client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) self.assertIsInstance(client_obj.connection, _MockConnection) self.assertIs(client_obj.connection.credentials, CREDENTIALS) @@ -131,7 +131,7 @@ def _get_target_class(): from google.cloud.client import JSONClient return JSONClient - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): @@ -152,7 +152,7 @@ def mock_get_credentials(): with _Monkey(client, get_credentials=mock_get_credentials, _determine_default_project=mock_determine_proj): - client_obj = self._makeOne() + client_obj = self._make_one() self.assertEqual(client_obj.project, PROJECT) self.assertIsInstance(client_obj.connection, _MockConnection) @@ -172,7 +172,7 @@ def mock_determine_proj(project): return None with _Monkey(client, _determine_default_project=mock_determine_proj): - self.assertRaises(EnvironmentError, self._makeOne) + self.assertRaises(EnvironmentError, self._make_one) self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) @@ -180,7 +180,7 @@ def test_ctor_w_invalid_project(self): CREDENTIALS = object() HTTP = object() with self.assertRaises(ValueError): - self._makeOne(project=object(), credentials=CREDENTIALS, http=HTTP) + self._make_one(project=object(), credentials=CREDENTIALS, http=HTTP) def _explicit_ctor_helper(self, project): import six @@ -188,7 +188,7 @@ def _explicit_ctor_helper(self, project): CREDENTIALS = object() HTTP = object() - client_obj = self._makeOne(project=project, credentials=CREDENTIALS, + client_obj = self._make_one(project=project, credentials=CREDENTIALS, http=HTTP) if isinstance(project, six.binary_type): diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test_connection.py index 3ecbcf8d71f6..e2eaa7e267aa 100644 --- a/packages/google-cloud-core/unit_tests/test_connection.py +++ b/packages/google-cloud-core/unit_tests/test_connection.py @@ -22,41 +22,41 @@ def _get_target_class(): from google.cloud.connection import Connection return Connection - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def 
test_ctor_defaults(self): - conn = self._makeOne() + conn = self._make_one() self.assertIsNone(conn.credentials) def test_ctor_explicit(self): credentials = _Credentials() self.assertEqual(credentials._create_scoped_calls, 0) - conn = self._makeOne(credentials) + conn = self._make_one(credentials) self.assertEqual(credentials._create_scoped_calls, 1) self.assertIs(conn.credentials, credentials) self.assertIsNone(conn._http) def test_ctor_explicit_http(self): http = object() - conn = self._makeOne(http=http) + conn = self._make_one(http=http) self.assertIsNone(conn.credentials) self.assertIs(conn.http, http) def test_ctor_credentials_wo_create_scoped(self): credentials = object() - conn = self._makeOne(credentials) + conn = self._make_one(credentials) self.assertIs(conn.credentials, credentials) self.assertIsNone(conn._http) def test_http_w_existing(self): - conn = self._makeOne() + conn = self._make_one() conn._http = http = object() self.assertIs(conn.http, http) def test_http_wo_creds(self): import httplib2 - conn = self._makeOne() + conn = self._make_one() self.assertIsInstance(conn.http, httplib2.Http) def test_http_w_creds(self): @@ -64,7 +64,7 @@ def test_http_w_creds(self): authorized = object() credentials = _Credentials(authorized) - conn = self._makeOne(credentials) + conn = self._make_one(credentials) self.assertIs(conn.http, authorized) self.assertIsInstance(credentials._called_with, httplib2.Http) @@ -72,7 +72,7 @@ def test_user_agent_format(self): from pkg_resources import get_distribution expected_ua = 'gcloud-python/{0}'.format( get_distribution('google-cloud-core').version) - conn = self._makeOne() + conn = self._make_one() self.assertEqual(conn.USER_AGENT, expected_ua) def test__create_scoped_credentials_with_scoped_credentials(self): @@ -114,7 +114,7 @@ def _get_target_class(): from google.cloud.connection import JSONConnection return JSONConnection - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def _makeMockOne(self, *args, **kw): @@ -131,22 +131,22 @@ def test_class_defaults(self): self.assertIsNone(klass.API_VERSION) def test_ctor_defaults(self): - conn = self._makeOne() + conn = self._make_one() self.assertIsNone(conn.credentials) def test_ctor_explicit(self): credentials = _Credentials() - conn = self._makeOne(credentials) + conn = self._make_one(credentials) self.assertIs(conn.credentials, credentials) def test_http_w_existing(self): - conn = self._makeOne() + conn = self._make_one() conn._http = http = object() self.assertIs(conn.http, http) def test_http_wo_creds(self): import httplib2 - conn = self._makeOne() + conn = self._make_one() self.assertIsInstance(conn.http, httplib2.Http) def test_http_w_creds(self): @@ -154,7 +154,7 @@ def test_http_w_creds(self): authorized = object() credentials = _Credentials(authorized) - conn = self._makeOne(credentials) + conn = self._make_one(credentials) self.assertIs(conn.http, authorized) self.assertIsInstance(credentials._called_with, httplib2.Http) @@ -189,7 +189,7 @@ def test_build_api_url_w_extra_query_params(self): self.assertEqual(parms['bar'], 'baz') def test__make_request_no_data_no_content_type_no_headers(self): - conn = self._makeOne() + conn = self._make_one() URI = 'http://example.com/test' http = conn._http = _Http( {'status': '200', 'content-type': 'text/plain'}, @@ -210,7 +210,7 @@ def test__make_request_no_data_no_content_type_no_headers(self): self.assertEqual(http._called_with['headers'], expected_headers) def 
test__make_request_w_data_no_extra_headers(self): - conn = self._makeOne() + conn = self._make_one() URI = 'http://example.com/test' http = conn._http = _Http( {'status': '200', 'content-type': 'text/plain'}, @@ -229,7 +229,7 @@ def test__make_request_w_data_no_extra_headers(self): self.assertEqual(http._called_with['headers'], expected_headers) def test__make_request_w_extra_headers(self): - conn = self._makeOne() + conn = self._make_one() URI = 'http://example.com/test' http = conn._http = _Http( {'status': '200', 'content-type': 'text/plain'}, diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index 5b145bb02a5f..b1c3a29546b8 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -22,11 +22,11 @@ def _get_target_class(): from google.cloud.exceptions import GoogleCloudError return GoogleCloudError - def _makeOne(self, message, errors=()): + def _make_one(self, message, errors=()): return self._get_target_class()(message, errors=errors) def test_ctor_defaults(self): - e = self._makeOne('Testing') + e = self._make_one('Testing') e.code = 600 self.assertEqual(str(e), '600 Testing') self.assertEqual(e.message, 'Testing') @@ -40,7 +40,7 @@ def test_ctor_explicit(self): 'message': 'Testing', 'reason': 'test', } - e = self._makeOne('Testing', [ERROR]) + e = self._make_one('Testing', [ERROR]) e.code = 600 self.assertEqual(str(e), '600 Testing') self.assertEqual(e.message, 'Testing') diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index cd3dcad5d370..82e5ff1c9e73 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -33,32 +33,32 @@ def _get_target_class(): from google.cloud.iterator import Page return Page - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): parent = object() item_to_value = object() - page = self._makeOne(parent, (1, 2, 3), item_to_value) + page = self._make_one(parent, (1, 2, 3), item_to_value) self.assertIs(page._parent, parent) self.assertEqual(page._num_items, 3) self.assertEqual(page._remaining, 3) self.assertIs(page._item_to_value, item_to_value) def test_num_items_property(self): - page = self._makeOne(None, (), None) + page = self._make_one(None, (), None) num_items = 42 page._num_items = num_items self.assertEqual(page.num_items, num_items) def test_remaining_property(self): - page = self._makeOne(None, (), None) + page = self._make_one(None, (), None) remaining = 1337 page._remaining = remaining self.assertEqual(page.remaining, remaining) def test___iter__(self): - page = self._makeOne(None, (), None) + page = self._make_one(None, (), None) self.assertIs(iter(page), page) def test_iterator_calls__item_to_value(self): @@ -73,7 +73,7 @@ def item_to_value(self, item): return item parent = Parent() - page = self._makeOne(parent, (10, 11, 12), + page = self._make_one(parent, (10, 11, 12), Parent.item_to_value) page._remaining = 100 @@ -97,7 +97,7 @@ def _get_target_class(): from google.cloud.iterator import Iterator return Iterator - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): @@ -106,7 +106,7 @@ def test_constructor(self): item_to_value = object() token = 'ab13nceor03' 
max_results = 1337 - iterator = self._makeOne(client, item_to_value, page_token=token, + iterator = self._make_one(client, item_to_value, page_token=token, max_results=max_results) self.assertFalse(iterator._started) @@ -119,7 +119,7 @@ def test_constructor(self): self.assertEqual(iterator.num_results, 0) def test_pages_property(self): - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) self.assertFalse(iterator._started) mock_iter = object() incremented = [] @@ -137,7 +137,7 @@ def page_iter(increment): def test_pages_property_started(self): import types - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) self.assertIsInstance(iterator.pages, types.GeneratorType) # Make sure we cannot restart. with self.assertRaises(ValueError): @@ -146,7 +146,7 @@ def test_pages_property_started(self): def test_pages_property_items_started(self): import types - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) self.assertIsInstance(iter(iterator), types.GeneratorType) with self.assertRaises(ValueError): getattr(iterator, 'pages') @@ -170,7 +170,7 @@ def test__items_iter(self): page1 = Page(parent, (item1, item2), self._do_nothing) page2 = Page(parent, (item3,), self._do_nothing) - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) # Fake the page iterator on the object. incremented = [] @@ -198,7 +198,7 @@ def page_iter(increment): self.assertEqual(incremented, [False]) def test___iter__(self): - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) self.assertFalse(iterator._started) incremented = [] @@ -214,7 +214,7 @@ def page_iter(increment): def test___iter___started(self): import types - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) self.assertIsInstance(iter(iterator), types.GeneratorType) with self.assertRaises(ValueError): iter(iterator) @@ -222,13 +222,13 @@ def test___iter___started(self): def test___iter___pages_started(self): import types - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) self.assertIsInstance(iterator.pages, types.GeneratorType) with self.assertRaises(ValueError): iter(iterator) def test__next_page_virtual(self): - iterator = self._makeOne(None, None) + iterator = self._make_one(None, None) with self.assertRaises(NotImplementedError): iterator._next_page() @@ -240,7 +240,7 @@ def _get_target_class(): from google.cloud.iterator import HTTPIterator return HTTPIterator - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): @@ -249,7 +249,7 @@ def test_constructor(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) self.assertEqual(iterator.path, path) @@ -269,7 +269,7 @@ def test_constructor_w_extra_param_collision(self): path = '/foo' extra_params = {'pageToken': 'val'} with self.assertRaises(ValueError): - self._makeOne(client, path, None, extra_params=extra_params) + self._make_one(client, path, None, extra_params=extra_params) def test_pages_iter_empty_then_another(self): import six @@ -277,7 +277,7 @@ def test_pages_iter_empty_then_another(self): from google.cloud import iterator as MUT items_key = 'its-key' - iterator = self._makeOne(None, None, None, items_key=items_key) + iterator = self._make_one(None, 
None, None, items_key=items_key) # Fake the next page class. fake_page = MUT.Page(None, (), None) page_args = [] @@ -312,7 +312,7 @@ def item_to_value(iterator, item): # pylint: disable=unused-argument connection = _Connection( {'items': [{'name': key1}, {'name': key2}]}) client = _Client(connection) - iterator = self._makeOne(client, path=path, + iterator = self._make_one(client, path=path, item_to_value=item_to_value) self.assertEqual(iterator.num_results, 0) @@ -337,14 +337,14 @@ def test__has_next_page_new(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) self.assertTrue(iterator._has_next_page()) def test__has_next_page_w_number_no_token(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) iterator.page_number = 1 self.assertFalse(iterator._has_next_page()) @@ -353,20 +353,20 @@ def test__has_next_page_w_number_w_token(self): client = _Client(connection) path = '/foo' token = 'token' - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) iterator.page_number = 1 iterator.next_page_token = token self.assertTrue(iterator._has_next_page()) def test__has_next_page_w_max_results_not_done(self): - iterator = self._makeOne(None, None, None, max_results=3, + iterator = self._make_one(None, None, None, max_results=3, page_token='definitely-not-none') iterator.page_number = 1 self.assertLess(iterator.num_results, iterator.max_results) self.assertTrue(iterator._has_next_page()) def test__has_next_page_w_max_results_done(self): - iterator = self._makeOne(None, None, None, max_results=3) + iterator = self._make_one(None, None, None, max_results=3) iterator.page_number = 1 iterator.num_results = iterator.max_results self.assertFalse(iterator._has_next_page()) @@ -375,7 +375,7 @@ def test__get_query_params_no_token(self): connection = _Connection() client = _Client(connection) path = '/foo' - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) self.assertEqual(iterator._get_query_params(), {}) def test__get_query_params_w_token(self): @@ -383,7 +383,7 @@ def test__get_query_params_w_token(self): client = _Client(connection) path = '/foo' token = 'token' - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) iterator.next_page_token = token self.assertEqual(iterator._get_query_params(), {'pageToken': token}) @@ -393,7 +393,7 @@ def test__get_query_params_w_max_results(self): client = _Client(connection) path = '/foo' max_results = 3 - iterator = self._makeOne(client, path, None, + iterator = self._make_one(client, path, None, max_results=max_results) iterator.num_results = 1 local_max = max_results - iterator.num_results @@ -405,7 +405,7 @@ def test__get_query_params_extra_params(self): client = _Client(connection) path = '/foo' extra_params = {'key': 'val'} - iterator = self._makeOne(client, path, None, + iterator = self._make_one(client, path, None, extra_params=extra_params) self.assertEqual(iterator._get_query_params(), extra_params) @@ -415,7 +415,7 @@ def test__get_query_params_w_token_and_extra_params(self): path = '/foo' token = 'token' extra_params = {'key': 'val'} - iterator = self._makeOne(client, path, None, + iterator = self._make_one(client, path, None, extra_params=extra_params) iterator.next_page_token = token @@ -431,7 +431,7 @@ 
def test__get_next_page_response_new_no_token_in_response(self): connection = _Connection({'items': [{'name': key1}, {'name': key2}], 'nextPageToken': token}) client = _Client(connection) - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) response = iterator._get_next_page_response() self.assertEqual(response['items'], [{'name': key1}, {'name': key2}]) kw, = connection._requested @@ -444,7 +444,7 @@ def test__get_next_page_response_with_post(self): returned = {'green': 'eggs', 'ham': 55} connection = _Connection(returned) client = _Client(connection) - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) iterator._HTTP_METHOD = 'POST' response = iterator._get_next_page_response() self.assertEqual(response, returned) @@ -460,7 +460,7 @@ def test__get_next_page_response_with_post(self): def test__get_next_page_bad_http_method(self): path = '/foo' client = _Client(None) - iterator = self._makeOne(client, path, None) + iterator = self._make_one(client, path, None) iterator._HTTP_METHOD = 'NOT-A-VERB' with self.assertRaises(ValueError): iterator._get_next_page_response() @@ -473,7 +473,7 @@ def _get_target_class(): from google.cloud.iterator import GAXIterator return GAXIterator - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): @@ -482,7 +482,7 @@ def test_constructor(self): page_iter = SimpleIter(token) item_to_value = object() max_results = 1337 - iterator = self._makeOne(client, page_iter, item_to_value, + iterator = self._make_one(client, page_iter, item_to_value, max_results=max_results) self.assertFalse(iterator._started) @@ -508,7 +508,7 @@ def test__next_page(self): page_token = '2sde98ds2s0hh' page_iter = _GAXPageIterator(page_items, page_token=page_token) # Wrap the GAX iterator. - iterator = self._makeOne(None, page_iter, self._do_nothing) + iterator = self._make_one(None, page_iter, self._do_nothing) page = iterator._next_page() # First check the page token. @@ -525,7 +525,7 @@ def test__next_page_empty(self): # Make a mock ``google.gax.PageIterator`` page_iter = _GAXPageIterator() # Wrap the GAX iterator. 
- iterator = self._makeOne(None, page_iter, self._do_nothing) + iterator = self._make_one(None, page_iter, self._do_nothing) page = iterator._next_page() self.assertIsNone(page) @@ -545,7 +545,7 @@ def test_iterate(self): page1 = (item1,) page2 = (item2, item3) page_iter = _GAXPageIterator(page1, page2, page_token=token1) - iterator = self._makeOne(None, page_iter, self._do_nothing) + iterator = self._make_one(None, page_iter, self._do_nothing) self.assertEqual(iterator.num_results, 0) diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 32e791b3798a..6ce2bac3cfde 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -112,12 +112,12 @@ def _get_target_class(): from google.cloud.operation import Operation return Operation - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): client = _Client() - operation = self._makeOne( + operation = self._make_one( self.OPERATION_NAME, client) self.assertEqual(operation.name, self.OPERATION_NAME) self.assertIs(operation.client, client) @@ -130,7 +130,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): client = _Client() - operation = self._makeOne( + operation = self._make_one( self.OPERATION_NAME, client, foo='bar') self.assertEqual(operation.name, self.OPERATION_NAME) @@ -239,7 +239,7 @@ def test_from_dict(self): def test_complete_property(self): client = _Client() - operation = self._makeOne(self.OPERATION_NAME, client) + operation = self._make_one(self.OPERATION_NAME, client) self.assertFalse(operation.complete) operation._complete = True self.assertTrue(operation.complete) @@ -248,7 +248,7 @@ def test_complete_property(self): def test_poll_already_complete(self): client = _Client() - operation = self._makeOne(self.OPERATION_NAME, client) + operation = self._make_one(self.OPERATION_NAME, client) operation._complete = True with self.assertRaises(ValueError): @@ -261,7 +261,7 @@ def test_poll_false(self): client = _Client() stub = client._operations_stub stub._get_operation_response = response_pb - operation = self._makeOne(self.OPERATION_NAME, client) + operation = self._make_one(self.OPERATION_NAME, client) self.assertFalse(operation.poll()) @@ -276,7 +276,7 @@ def test_poll_true(self): client = _Client() stub = client._operations_stub stub._get_operation_response = response_pb - operation = self._makeOne(self.OPERATION_NAME, client) + operation = self._make_one(self.OPERATION_NAME, client) self.assertTrue(operation.poll()) @@ -301,7 +301,7 @@ def test_poll_http(self): } connection = _Connection(api_response) client = _Client(connection) - operation = self._makeOne(name, client) + operation = self._make_one(name, client) operation._from_grpc = False with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}): @@ -316,7 +316,7 @@ def test_poll_http(self): def test__update_state_done(self): from google.longrunning import operations_pb2 - operation = self._makeOne(None, None) + operation = self._make_one(None, None) self.assertFalse(operation.complete) operation_pb = operations_pb2.Operation(done=True) operation._update_state(operation_pb) @@ -329,7 +329,7 @@ def test__update_state_metadata(self): from google.cloud._testing import _Monkey from google.cloud import operation as MUT - operation = self._makeOne(None, None) + operation = self._make_one(None, None) self.assertIsNone(operation.metadata) 
val_pb = Value(number_value=1337) @@ -347,7 +347,7 @@ def test__update_state_error(self): from google.rpc.status_pb2 import Status from google.cloud._testing import _Monkey - operation = self._makeOne(None, None) + operation = self._make_one(None, None) self.assertIsNone(operation.error) self.assertIsNone(operation.response) @@ -365,7 +365,7 @@ def test__update_state_response(self): from google.cloud._testing import _Monkey from google.cloud import operation as MUT - operation = self._makeOne(None, None) + operation = self._make_one(None, None) self.assertIsNone(operation.error) self.assertIsNone(operation.response) @@ -384,7 +384,7 @@ def test__update_state_response(self): def test__update_state_no_result(self): from google.longrunning import operations_pb2 - operation = self._makeOne(None, None) + operation = self._make_one(None, None) self.assertIsNone(operation.error) self.assertIsNone(operation.response) From de737d3a2a72eee5df975d22a5096f74ff361cf1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:06:21 -0800 Subject: [PATCH 088/468] Changing all instances of _callFUT to _call_fut. Done via: $ git grep -l _callFUT | \ > xargs sed -i s/_callFUT/_call_fut/g --- .../unit_tests/streaming/test_http_wrapper.py | 38 ++-- .../unit_tests/streaming/test_util.py | 16 +- .../unit_tests/test__helpers.py | 170 +++++++++--------- .../unit_tests/test_credentials.py | 32 ++-- .../unit_tests/test_exceptions.py | 14 +- .../unit_tests/test_iterator.py | 4 +- .../unit_tests/test_operation.py | 16 +- 7 files changed, 145 insertions(+), 145 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py index c0608f5476a0..f05e1b0a6f9f 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py @@ -215,62 +215,62 @@ def test_is_redirect_w_code_w_location(self): class Test__check_response(unittest.TestCase): - def _callFUT(self, *args, **kw): + def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import _check_response return _check_response(*args, **kw) def test_w_none(self): from google.cloud.streaming.exceptions import RequestError with self.assertRaises(RequestError): - self._callFUT(None) + self._call_fut(None) def test_w_TOO_MANY_REQUESTS(self): from google.cloud.streaming.exceptions import BadStatusCodeError from google.cloud.streaming.http_wrapper import TOO_MANY_REQUESTS with self.assertRaises(BadStatusCodeError): - self._callFUT(_Response(TOO_MANY_REQUESTS)) + self._call_fut(_Response(TOO_MANY_REQUESTS)) def test_w_50x(self): from google.cloud.streaming.exceptions import BadStatusCodeError with self.assertRaises(BadStatusCodeError): - self._callFUT(_Response(500)) + self._call_fut(_Response(500)) with self.assertRaises(BadStatusCodeError): - self._callFUT(_Response(503)) + self._call_fut(_Response(503)) def test_w_retry_after(self): from google.cloud.streaming.exceptions import RetryAfterError with self.assertRaises(RetryAfterError): - self._callFUT(_Response(200, 20)) + self._call_fut(_Response(200, 20)) def test_pass(self): - self._callFUT(_Response(200)) + self._call_fut(_Response(200)) class Test__reset_http_connections(unittest.TestCase): - def _callFUT(self, *args, **kw): + def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import _reset_http_connections return _reset_http_connections(*args, **kw) def test_wo_connections(self): 
http = object() - self._callFUT(http) + self._call_fut(http) def test_w_connections(self): connections = {'delete:me': object(), 'skip_me': object()} http = _Dummy(connections=connections) - self._callFUT(http) + self._call_fut(http) self.assertFalse('delete:me' in connections) self.assertTrue('skip_me' in connections) class Test___make_api_request_no_retry(unittest.TestCase): - def _callFUT(self, *args, **kw): + def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import ( _make_api_request_no_retry) return _make_api_request_no_retry(*args, **kw) @@ -297,7 +297,7 @@ def test_defaults_wo_connections(self): _checked = [] with _Monkey(MUT, httplib2=_httplib2, _check_response=_checked.append): - response = self._callFUT(_http, _request) + response = self._call_fut(_http, _request) self.assertIsInstance(response, MUT.Response) self.assertEqual(response.info, INFO) @@ -319,7 +319,7 @@ def test_w_http_connections_miss(self): _checked = [] with _Monkey(MUT, httplib2=_httplib2, _check_response=_checked.append): - response = self._callFUT(_http, _request) + response = self._call_fut(_http, _request) self.assertIsInstance(response, MUT.Response) self.assertEqual(response.info, INFO) @@ -341,7 +341,7 @@ def test_w_http_connections_hit(self): _checked = [] with _Monkey(MUT, httplib2=_httplib2, _check_response=_checked.append): - response = self._callFUT(_http, _request) + response = self._call_fut(_http, _request) self.assertIsInstance(response, MUT.Response) self.assertEqual(response.info, INFO) @@ -363,13 +363,13 @@ def test_w_request_returning_None(self): _request = _Request() with _Monkey(MUT, httplib2=_httplib2): with self.assertRaises(RequestError): - self._callFUT(_http, _request) + self._call_fut(_http, _request) self._verify_requested(_http, _request, connection_type=CONN_TYPE) class Test_make_api_request(unittest.TestCase): - def _callFUT(self, *args, **kw): + def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import make_api_request return make_api_request(*args, **kw) @@ -386,7 +386,7 @@ def _wo_exception(*args, **kw): with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, _check_response=_checked.append): - response = self._callFUT(HTTP, REQUEST) + response = self._call_fut(HTTP, REQUEST) self.assertIs(response, RESPONSE) expected_kw = {'redirections': MUT._REDIRECTIONS} @@ -412,7 +412,7 @@ def _wo_exception(*args, **kw): with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, _check_response=_checked.append): - response = self._callFUT(HTTP, REQUEST, retries=5) + response = self._call_fut(HTTP, REQUEST, retries=5) self.assertIs(response, RESPONSE) self.assertEqual(len(_created), 5) @@ -436,7 +436,7 @@ def _wo_exception(*args, **kw): _make_api_request_no_retry=_wo_exception, _check_response=_checked.append): with self.assertRaises(ValueError): - self._callFUT(HTTP, REQUEST, retries=3) + self._call_fut(HTTP, REQUEST, retries=3) self.assertEqual(len(_created), 3) expected_kw = {'redirections': MUT._REDIRECTIONS} diff --git a/packages/google-cloud-core/unit_tests/streaming/test_util.py b/packages/google-cloud-core/unit_tests/streaming/test_util.py index c760a1955610..1ee1c03d073f 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_util.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_util.py @@ -17,7 +17,7 @@ class Test_calculate_wait_for_retry(unittest.TestCase): - def _callFUT(self, *args, **kw): + def _call_fut(self, *args, **kw): from google.cloud.streaming.util import calculate_wait_for_retry return 
calculate_wait_for_retry(*args, **kw) @@ -25,38 +25,38 @@ def test_w_negative_jitter_lt_max_wait(self): import random from google.cloud._testing import _Monkey with _Monkey(random, uniform=lambda lower, upper: lower): - self.assertEqual(self._callFUT(1), 1.5) + self.assertEqual(self._call_fut(1), 1.5) def test_w_positive_jitter_gt_max_wait(self): import random from google.cloud._testing import _Monkey with _Monkey(random, uniform=lambda lower, upper: upper): - self.assertEqual(self._callFUT(4), 20) + self.assertEqual(self._call_fut(4), 20) class Test_acceptable_mime_type(unittest.TestCase): - def _callFUT(self, *args, **kw): + def _call_fut(self, *args, **kw): from google.cloud.streaming.util import acceptable_mime_type return acceptable_mime_type(*args, **kw) def test_pattern_wo_slash(self): with self.assertRaises(ValueError) as err: - self._callFUT(['text/*'], 'BOGUS') + self._call_fut(['text/*'], 'BOGUS') self.assertEqual( err.exception.args, ('Invalid MIME type: "BOGUS"',)) def test_accept_pattern_w_semicolon(self): with self.assertRaises(ValueError) as err: - self._callFUT(['text/*;charset=utf-8'], 'text/plain') + self._call_fut(['text/*;charset=utf-8'], 'text/plain') self.assertEqual( err.exception.args, ('MIME patterns with parameter unsupported: ' '"text/*;charset=utf-8"',)) def test_miss(self): - self.assertFalse(self._callFUT(['image/*'], 'text/plain')) + self.assertFalse(self._call_fut(['image/*'], 'text/plain')) def test_hit(self): - self.assertTrue(self._callFUT(['text/*'], 'text/plain')) + self.assertTrue(self._call_fut(['text/*'], 'text/plain')) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index c6760cfc8e6a..0669193fc657 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -101,34 +101,34 @@ def test___str__(self): class Test__ensure_tuple_or_list(unittest.TestCase): - def _callFUT(self, arg_name, tuple_or_list): + def _call_fut(self, arg_name, tuple_or_list): from google.cloud._helpers import _ensure_tuple_or_list return _ensure_tuple_or_list(arg_name, tuple_or_list) def test_valid_tuple(self): valid_tuple_or_list = ('a', 'b', 'c', 'd') - result = self._callFUT('ARGNAME', valid_tuple_or_list) + result = self._call_fut('ARGNAME', valid_tuple_or_list) self.assertEqual(result, ['a', 'b', 'c', 'd']) def test_valid_list(self): valid_tuple_or_list = ['a', 'b', 'c', 'd'] - result = self._callFUT('ARGNAME', valid_tuple_or_list) + result = self._call_fut('ARGNAME', valid_tuple_or_list) self.assertEqual(result, valid_tuple_or_list) def test_invalid(self): invalid_tuple_or_list = object() with self.assertRaises(TypeError): - self._callFUT('ARGNAME', invalid_tuple_or_list) + self._call_fut('ARGNAME', invalid_tuple_or_list) def test_invalid_iterable(self): invalid_tuple_or_list = 'FOO' with self.assertRaises(TypeError): - self._callFUT('ARGNAME', invalid_tuple_or_list) + self._call_fut('ARGNAME', invalid_tuple_or_list) class Test__app_engine_id(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _app_engine_id return _app_engine_id() @@ -137,7 +137,7 @@ def test_no_value(self): from google.cloud import _helpers with _Monkey(_helpers, app_identity=None): - dataset_id = self._callFUT() + dataset_id = self._call_fut() self.assertIsNone(dataset_id) def test_value_set(self): @@ -147,13 +147,13 @@ def test_value_set(self): APP_ENGINE_ID = object() APP_IDENTITY = _AppIdentity(APP_ENGINE_ID) 
with _Monkey(_helpers, app_identity=APP_IDENTITY): - dataset_id = self._callFUT() + dataset_id = self._call_fut() self.assertEqual(dataset_id, APP_ENGINE_ID) class Test__file_project_id(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _file_project_id return _file_project_id() @@ -170,7 +170,7 @@ def test_success(self): environ = {CREDENTIALS: temp.name} with _Monkey(os, getenv=environ.get): - result = self._callFUT() + result = self._call_fut() self.assertEqual(result, project_id) @@ -179,14 +179,14 @@ def test_no_environment_variable_set(self): environ = {} with _Monkey(os, getenv=environ.get): - result = self._callFUT() + result = self._call_fut() self.assertIsNone(result) class Test__get_nix_config_path(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _get_nix_config_path return _get_nix_config_path() @@ -198,7 +198,7 @@ def test_it(self): config_file = 'b' with _Monkey(MUT, _USER_ROOT=user_root, _GCLOUD_CONFIG_FILE=config_file): - result = self._callFUT() + result = self._call_fut() expected = os.path.join(user_root, '.config', config_file) self.assertEqual(result, expected) @@ -206,7 +206,7 @@ def test_it(self): class Test__get_windows_config_path(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _get_windows_config_path return _get_windows_config_path() @@ -219,7 +219,7 @@ def test_it(self): config_file = 'b' with _Monkey(os, getenv=environ.get): with _Monkey(MUT, _GCLOUD_CONFIG_FILE=config_file): - result = self._callFUT() + result = self._call_fut() expected = os.path.join(appdata_dir, config_file) self.assertEqual(result, expected) @@ -229,7 +229,7 @@ class Test__default_service_project_id(unittest.TestCase): CONFIG_TEMPLATE = '[%s]\n%s = %s\n' - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _default_service_project_id return _default_service_project_id() @@ -252,7 +252,7 @@ def mock_get_path(): with _Monkey(os, name='not-nt'): with _Monkey(MUT, _get_nix_config_path=mock_get_path, _USER_ROOT='not-None'): - result = self._callFUT() + result = self._call_fut() self.assertEqual(result, project_id) @@ -272,7 +272,7 @@ def mock_get_path(): with _Monkey(os, name='not-nt'): with _Monkey(MUT, _get_nix_config_path=mock_get_path, _USER_ROOT='not-None'): - result = self._callFUT() + result = self._call_fut() self.assertEqual(result, None) @@ -295,7 +295,7 @@ def mock_get_path(): with _Monkey(os, name='nt'): with _Monkey(MUT, _get_windows_config_path=mock_get_path, _USER_ROOT=None): - result = self._callFUT() + result = self._call_fut() self.assertEqual(result, project_id) @@ -305,14 +305,14 @@ def test_gae(self): with _Monkey(os, name='not-nt'): with _Monkey(MUT, _USER_ROOT=None): - result = self._callFUT() + result = self._call_fut() self.assertIsNone(result) class Test__compute_engine_id(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _compute_engine_id return _compute_engine_id() @@ -330,26 +330,26 @@ def _connection_factory(host, timeout): def test_bad_status(self): connection = _HTTPConnection(404, None) with self._monkeyConnection(connection): - dataset_id = self._callFUT() + dataset_id = self._call_fut() self.assertIsNone(dataset_id) def test_success(self): COMPUTE_ENGINE_ID = object() connection = _HTTPConnection(200, COMPUTE_ENGINE_ID) with self._monkeyConnection(connection): - dataset_id = self._callFUT() + dataset_id = self._call_fut() 
self.assertEqual(dataset_id, COMPUTE_ENGINE_ID) def test_socket_raises(self): connection = _TimeoutHTTPConnection() with self._monkeyConnection(connection): - dataset_id = self._callFUT() + dataset_id = self._call_fut() self.assertIsNone(dataset_id) class Test__get_production_project(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud._helpers import _get_production_project return _get_production_project() @@ -358,7 +358,7 @@ def test_no_value(self): environ = {} with _Monkey(os, getenv=environ.get): - project = self._callFUT() + project = self._call_fut() self.assertIsNone(project) def test_value_set(self): @@ -368,13 +368,13 @@ def test_value_set(self): MOCK_PROJECT = object() environ = {PROJECT: MOCK_PROJECT} with _Monkey(os, getenv=environ.get): - project = self._callFUT() + project = self._call_fut() self.assertEqual(project, MOCK_PROJECT) class Test__determine_default_project(unittest.TestCase): - def _callFUT(self, project=None): + def _call_fut(self, project=None): from google.cloud._helpers import _determine_default_project return _determine_default_project(project=project) @@ -414,7 +414,7 @@ def gce_mock(): } with _Monkey(_helpers, **patched_methods): - returned_project = self._callFUT(project) + returned_project = self._call_fut(project) return returned_project, _callers @@ -453,7 +453,7 @@ def test_gce(self): class Test__millis(unittest.TestCase): - def _callFUT(self, value): + def _call_fut(self, value): from google.cloud._helpers import _millis return _millis(value) @@ -462,12 +462,12 @@ def test_one_second_from_epoch(self): from google.cloud._helpers import UTC WHEN = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=UTC) - self.assertEqual(self._callFUT(WHEN), 1000) + self.assertEqual(self._call_fut(WHEN), 1000) class Test__microseconds_from_datetime(unittest.TestCase): - def _callFUT(self, value): + def _call_fut(self, value): from google.cloud._helpers import _microseconds_from_datetime return _microseconds_from_datetime(value) @@ -478,18 +478,18 @@ def test_it(self): timestamp = datetime.datetime(1970, 1, 1, hour=0, minute=0, second=0, microsecond=microseconds) - result = self._callFUT(timestamp) + result = self._call_fut(timestamp) self.assertEqual(result, microseconds) class Test__millis_from_datetime(unittest.TestCase): - def _callFUT(self, value): + def _call_fut(self, value): from google.cloud._helpers import _millis_from_datetime return _millis_from_datetime(value) def test_w_none(self): - self.assertIsNone(self._callFUT(None)) + self.assertIsNone(self._call_fut(None)) def test_w_utc_datetime(self): import datetime @@ -500,7 +500,7 @@ def test_w_utc_datetime(self): NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) NOW_MICROS = _microseconds_from_datetime(NOW) MILLIS = NOW_MICROS // 1000 - result = self._callFUT(NOW) + result = self._call_fut(NOW) self.assertIsInstance(result, six.integer_types) self.assertEqual(result, MILLIS) @@ -518,7 +518,7 @@ class CET(_UTC): NOW = datetime.datetime(2015, 7, 28, 16, 34, 47, tzinfo=zone) NOW_MICROS = _microseconds_from_datetime(NOW) MILLIS = NOW_MICROS // 1000 - result = self._callFUT(NOW) + result = self._call_fut(NOW) self.assertIsInstance(result, six.integer_types) self.assertEqual(result, MILLIS) @@ -532,14 +532,14 @@ def test_w_naive_datetime(self): UTC_NOW = NOW.replace(tzinfo=UTC) UTC_NOW_MICROS = _microseconds_from_datetime(UTC_NOW) MILLIS = UTC_NOW_MICROS // 1000 - result = self._callFUT(NOW) + result = self._call_fut(NOW) self.assertIsInstance(result, six.integer_types) 
self.assertEqual(result, MILLIS) class Test__datetime_from_microseconds(unittest.TestCase): - def _callFUT(self, value): + def _call_fut(self, value): from google.cloud._helpers import _datetime_from_microseconds return _datetime_from_microseconds(value) @@ -551,24 +551,24 @@ def test_it(self): NOW = datetime.datetime(2015, 7, 29, 17, 45, 21, 123456, tzinfo=UTC) NOW_MICROS = _microseconds_from_datetime(NOW) - self.assertEqual(self._callFUT(NOW_MICROS), NOW) + self.assertEqual(self._call_fut(NOW_MICROS), NOW) class Test___date_from_iso8601_date(unittest.TestCase): - def _callFUT(self, value): + def _call_fut(self, value): from google.cloud._helpers import _date_from_iso8601_date return _date_from_iso8601_date(value) def test_todays_date(self): import datetime TODAY = datetime.date.today() - self.assertEqual(self._callFUT(TODAY.strftime("%Y-%m-%d")), TODAY) + self.assertEqual(self._call_fut(TODAY.strftime("%Y-%m-%d")), TODAY) class Test__rfc3339_to_datetime(unittest.TestCase): - def _callFUT(self, dt_str): + def _call_fut(self, dt_str): from google.cloud._helpers import _rfc3339_to_datetime return _rfc3339_to_datetime(dt_str) @@ -584,7 +584,7 @@ def test_w_bogus_zone(self): dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( year, month, day, hour, minute, seconds, micros) with self.assertRaises(ValueError): - self._callFUT(dt_str) + self._call_fut(dt_str) def test_w_microseconds(self): import datetime @@ -600,7 +600,7 @@ def test_w_microseconds(self): dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( year, month, day, hour, minute, seconds, micros) - result = self._callFUT(dt_str) + result = self._call_fut(dt_str) expected_result = datetime.datetime( year, month, day, hour, minute, seconds, micros, UTC) self.assertEqual(result, expected_result) @@ -617,12 +617,12 @@ def test_w_naonseconds(self): dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%09dZ' % ( year, month, day, hour, minute, seconds, nanos) with self.assertRaises(ValueError): - self._callFUT(dt_str) + self._call_fut(dt_str) class Test__rfc3339_nanos_to_datetime(unittest.TestCase): - def _callFUT(self, dt_str): + def _call_fut(self, dt_str): from google.cloud._helpers import _rfc3339_nanos_to_datetime return _rfc3339_nanos_to_datetime(dt_str) @@ -638,7 +638,7 @@ def test_w_bogus_zone(self): dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( year, month, day, hour, minute, seconds, micros) with self.assertRaises(ValueError): - self._callFUT(dt_str) + self._call_fut(dt_str) def test_w_truncated_nanos(self): import datetime @@ -664,7 +664,7 @@ def test_w_truncated_nanos(self): for truncated, micros in truncateds_and_micros: dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%sZ' % ( year, month, day, hour, minute, seconds, truncated) - result = self._callFUT(dt_str) + result = self._call_fut(dt_str) expected_result = datetime.datetime( year, month, day, hour, minute, seconds, micros, UTC) self.assertEqual(result, expected_result) @@ -682,7 +682,7 @@ def test_without_nanos(self): dt_str = '%d-%02d-%02dT%02d:%02d:%02dZ' % ( year, month, day, hour, minute, seconds) - result = self._callFUT(dt_str) + result = self._call_fut(dt_str) expected_result = datetime.datetime( year, month, day, hour, minute, seconds, 0, UTC) self.assertEqual(result, expected_result) @@ -702,7 +702,7 @@ def test_w_naonseconds(self): dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%09dZ' % ( year, month, day, hour, minute, seconds, nanos) - result = self._callFUT(dt_str) + result = self._call_fut(dt_str) expected_result = datetime.datetime( year, month, day, hour, minute, seconds, micros, UTC) 
self.assertEqual(result, expected_result) @@ -710,7 +710,7 @@ def test_w_naonseconds(self): class Test__datetime_to_rfc3339(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud._helpers import _datetime_to_rfc3339 return _datetime_to_rfc3339(*args, **kwargs) @@ -729,7 +729,7 @@ def test_w_utc_datetime(self): from google.cloud._helpers import UTC TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=UTC) - result = self._callFUT(TIMESTAMP, ignore_zone=False) + result = self._call_fut(TIMESTAMP, ignore_zone=False) self.assertEqual(result, '2016-04-05T13:30:00.000000Z') def test_w_non_utc_datetime(self): @@ -737,7 +737,7 @@ def test_w_non_utc_datetime(self): zone = self._make_timezone(offset=datetime.timedelta(hours=-1)) TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone) - result = self._callFUT(TIMESTAMP, ignore_zone=False) + result = self._call_fut(TIMESTAMP, ignore_zone=False) self.assertEqual(result, '2016-04-05T14:30:00.000000Z') def test_w_non_utc_datetime_and_ignore_zone(self): @@ -745,68 +745,68 @@ def test_w_non_utc_datetime_and_ignore_zone(self): zone = self._make_timezone(offset=datetime.timedelta(hours=-1)) TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone) - result = self._callFUT(TIMESTAMP) + result = self._call_fut(TIMESTAMP) self.assertEqual(result, '2016-04-05T13:30:00.000000Z') def test_w_naive_datetime(self): import datetime TIMESTAMP = datetime.datetime(2016, 4, 5, 13, 30, 0) - result = self._callFUT(TIMESTAMP) + result = self._call_fut(TIMESTAMP) self.assertEqual(result, '2016-04-05T13:30:00.000000Z') class Test__to_bytes(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud._helpers import _to_bytes return _to_bytes(*args, **kwargs) def test_with_bytes(self): value = b'bytes-val' - self.assertEqual(self._callFUT(value), value) + self.assertEqual(self._call_fut(value), value) def test_with_unicode(self): value = u'string-val' encoded_value = b'string-val' - self.assertEqual(self._callFUT(value), encoded_value) + self.assertEqual(self._call_fut(value), encoded_value) def test_unicode_non_ascii(self): value = u'\u2013' # Long hyphen encoded_value = b'\xe2\x80\x93' - self.assertRaises(UnicodeEncodeError, self._callFUT, value) - self.assertEqual(self._callFUT(value, encoding='utf-8'), + self.assertRaises(UnicodeEncodeError, self._call_fut, value) + self.assertEqual(self._call_fut(value, encoding='utf-8'), encoded_value) def test_with_nonstring_type(self): value = object() - self.assertRaises(TypeError, self._callFUT, value) + self.assertRaises(TypeError, self._call_fut, value) class Test__bytes_to_unicode(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud._helpers import _bytes_to_unicode return _bytes_to_unicode(*args, **kwargs) def test_with_bytes(self): value = b'bytes-val' encoded_value = 'bytes-val' - self.assertEqual(self._callFUT(value), encoded_value) + self.assertEqual(self._call_fut(value), encoded_value) def test_with_unicode(self): value = u'string-val' encoded_value = 'string-val' - self.assertEqual(self._callFUT(value), encoded_value) + self.assertEqual(self._call_fut(value), encoded_value) def test_with_nonstring_type(self): value = object() - self.assertRaises(ValueError, self._callFUT, value) + self.assertRaises(ValueError, self._call_fut, value) class Test__pb_timestamp_to_datetime(unittest.TestCase): - def _callFUT(self, timestamp): 
+ def _call_fut(self, timestamp): from google.cloud._helpers import _pb_timestamp_to_datetime return _pb_timestamp_to_datetime(timestamp) @@ -822,12 +822,12 @@ def test_it(self): # ... so 1 minute and 1 second after is 61 seconds and 1234 # microseconds is 1234000 nanoseconds. timestamp = Timestamp(seconds=61, nanos=1234000) - self.assertEqual(self._callFUT(timestamp), dt_stamp) + self.assertEqual(self._call_fut(timestamp), dt_stamp) class Test__pb_timestamp_to_rfc3339(unittest.TestCase): - def _callFUT(self, timestamp): + def _call_fut(self, timestamp): from google.cloud._helpers import _pb_timestamp_to_rfc3339 return _pb_timestamp_to_rfc3339(timestamp) @@ -838,13 +838,13 @@ def test_it(self): # ... so 1 minute and 1 second after is 61 seconds and 1234 # microseconds is 1234000 nanoseconds. timestamp = Timestamp(seconds=61, nanos=1234000) - self.assertEqual(self._callFUT(timestamp), + self.assertEqual(self._call_fut(timestamp), '1970-01-01T00:01:01.001234Z') class Test__datetime_to_pb_timestamp(unittest.TestCase): - def _callFUT(self, when): + def _call_fut(self, when): from google.cloud._helpers import _datetime_to_pb_timestamp return _datetime_to_pb_timestamp(when) @@ -860,7 +860,7 @@ def test_it(self): # ... so 1 minute and 1 second after is 61 seconds and 1234 # microseconds is 1234000 nanoseconds. timestamp = Timestamp(seconds=61, nanos=1234000) - self.assertEqual(self._callFUT(dt_stamp), timestamp) + self.assertEqual(self._call_fut(dt_stamp), timestamp) class Test__name_from_project_path(unittest.TestCase): @@ -869,39 +869,39 @@ class Test__name_from_project_path(unittest.TestCase): THING_NAME = 'THING_NAME' TEMPLATE = r'projects/(?P\w+)/things/(?P\w+)' - def _callFUT(self, path, project, template): + def _call_fut(self, path, project, template): from google.cloud._helpers import _name_from_project_path return _name_from_project_path(path, project, template) def test_w_invalid_path_length(self): PATH = 'projects/foo' with self.assertRaises(ValueError): - self._callFUT(PATH, None, self.TEMPLATE) + self._call_fut(PATH, None, self.TEMPLATE) def test_w_invalid_path_segments(self): PATH = 'foo/%s/bar/%s' % (self.PROJECT, self.THING_NAME) with self.assertRaises(ValueError): - self._callFUT(PATH, self.PROJECT, self.TEMPLATE) + self._call_fut(PATH, self.PROJECT, self.TEMPLATE) def test_w_mismatched_project(self): PROJECT1 = 'PROJECT1' PROJECT2 = 'PROJECT2' PATH = 'projects/%s/things/%s' % (PROJECT1, self.THING_NAME) with self.assertRaises(ValueError): - self._callFUT(PATH, PROJECT2, self.TEMPLATE) + self._call_fut(PATH, PROJECT2, self.TEMPLATE) def test_w_valid_data_w_compiled_regex(self): import re template = re.compile(self.TEMPLATE) PATH = 'projects/%s/things/%s' % (self.PROJECT, self.THING_NAME) - name = self._callFUT(PATH, self.PROJECT, template) + name = self._call_fut(PATH, self.PROJECT, template) self.assertEqual(name, self.THING_NAME) def test_w_project_passed_as_none(self): PROJECT1 = 'PROJECT1' PATH = 'projects/%s/things/%s' % (PROJECT1, self.THING_NAME) - self._callFUT(PATH, None, self.TEMPLATE) - name = self._callFUT(PATH, None, self.TEMPLATE) + self._call_fut(PATH, None, self.TEMPLATE) + name = self._call_fut(PATH, None, self.TEMPLATE) self.assertEqual(name, self.THING_NAME) @@ -940,7 +940,7 @@ def callback(*args): class Test_make_secure_channel(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud._helpers import make_secure_channel return make_secure_channel(*args, **kwargs) @@ -991,7 +991,7 @@ def 
mock_plugin(*args): user_agent = 'USER_AGENT' with _Monkey(MUT, grpc=grpc_mod, MetadataPlugin=mock_plugin): - result = self._callFUT(credentials, user_agent, host) + result = self._call_fut(credentials, user_agent, host) self.assertIs(result, CHANNEL) self.assertEqual(plugin_args, [(credentials,)]) @@ -1012,7 +1012,7 @@ def mock_plugin(*args): class Test_make_secure_stub(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud._helpers import make_secure_stub return make_secure_stub(*args, **kwargs) @@ -1037,7 +1037,7 @@ def mock_channel(*args): user_agent = 'you-sir-age-int' host = 'localhost' with _Monkey(MUT, make_secure_channel=mock_channel): - stub = self._callFUT(credentials, user_agent, + stub = self._call_fut(credentials, user_agent, stub_class, host) self.assertIs(stub, result) @@ -1048,7 +1048,7 @@ def mock_channel(*args): class Test_make_insecure_stub(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud._helpers import make_insecure_stub return make_insecure_stub(*args, **kwargs) @@ -1073,7 +1073,7 @@ def mock_stub_class(channel): return mock_result with _Monkey(MUT, grpc=grpc_mod): - result = self._callFUT(mock_stub_class, host, port=port) + result = self._call_fut(mock_stub_class, host, port=port) self.assertIs(result, mock_result) self.assertEqual(stub_inputs, [CHANNEL]) diff --git a/packages/google-cloud-core/unit_tests/test_credentials.py b/packages/google-cloud-core/unit_tests/test_credentials.py index 9fc10dcca4e8..3da65b928cd5 100644 --- a/packages/google-cloud-core/unit_tests/test_credentials.py +++ b/packages/google-cloud-core/unit_tests/test_credentials.py @@ -17,7 +17,7 @@ class Test_get_credentials(unittest.TestCase): - def _callFUT(self): + def _call_fut(self): from google.cloud import credentials return credentials.get_credentials() @@ -27,7 +27,7 @@ def test_it(self): client = _Client() with _Monkey(MUT, client=client): - found = self._callFUT() + found = self._call_fut() self.assertIsInstance(found, _Credentials) self.assertIs(found, client._signed) self.assertTrue(client._get_app_default_called) @@ -35,7 +35,7 @@ def test_it(self): class Test_generate_signed_url(unittest.TestCase): - def _callFUT(self, *args, **kwargs): + def _call_fut(self, *args, **kwargs): from google.cloud.credentials import generate_signed_url return generate_signed_url(*args, **kwargs) @@ -61,7 +61,7 @@ def _get_signed_query_params(*args): } with _Monkey(MUT, _get_signed_query_params=_get_signed_query_params): - url = self._callFUT(CREDENTIALS, RESOURCE, 1000, + url = self._call_fut(CREDENTIALS, RESOURCE, 1000, api_access_endpoint=ENDPOINT, response_type=response_type, response_disposition=response_disposition, @@ -115,7 +115,7 @@ def test_with_google_credentials(self): class Test__get_signed_query_params(unittest.TestCase): - def _callFUT(self, credentials, expiration, string_to_sign): + def _call_fut(self, credentials, expiration, string_to_sign): from google.cloud.credentials import _get_signed_query_params return _get_signed_query_params(credentials, expiration, string_to_sign) @@ -129,7 +129,7 @@ def test_it(self): service_account_email=ACCOUNT_NAME) EXPIRATION = 100 STRING_TO_SIGN = 'dummy_signature' - result = self._callFUT(CREDENTIALS, EXPIRATION, + result = self._call_fut(CREDENTIALS, EXPIRATION, STRING_TO_SIGN) self.assertEqual(result, { @@ -142,7 +142,7 @@ def test_it(self): class Test__get_expiration_seconds(unittest.TestCase): - def _callFUT(self, 
expiration): + def _call_fut(self, expiration): from google.cloud.credentials import _get_expiration_seconds return _get_expiration_seconds(expiration) @@ -151,11 +151,11 @@ def _utc_seconds(self, when): return int(calendar.timegm(when.timetuple())) def test_w_invalid(self): - self.assertRaises(TypeError, self._callFUT, object()) - self.assertRaises(TypeError, self._callFUT, None) + self.assertRaises(TypeError, self._call_fut, object()) + self.assertRaises(TypeError, self._call_fut, None) def test_w_int(self): - self.assertEqual(self._callFUT(123), 123) + self.assertEqual(self._call_fut(123), 123) def test_w_long(self): try: @@ -163,14 +163,14 @@ def test_w_long(self): except NameError: # pragma: NO COVER Py3K pass else: - self.assertEqual(self._callFUT(long(123)), 123) + self.assertEqual(self._call_fut(long(123)), 123) def test_w_naive_datetime(self): import datetime expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) utc_seconds = self._utc_seconds(expiration_no_tz) - self.assertEqual(self._callFUT(expiration_no_tz), utc_seconds) + self.assertEqual(self._call_fut(expiration_no_tz), utc_seconds) def test_w_utc_datetime(self): import datetime @@ -178,7 +178,7 @@ def test_w_utc_datetime(self): expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) utc_seconds = self._utc_seconds(expiration_utc) - self.assertEqual(self._callFUT(expiration_utc), utc_seconds) + self.assertEqual(self._call_fut(expiration_utc), utc_seconds) def test_w_other_zone_datetime(self): import datetime @@ -192,7 +192,7 @@ class CET(_UTC): expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) utc_seconds = self._utc_seconds(expiration_other) cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC - self.assertEqual(self._callFUT(expiration_other), cet_seconds) + self.assertEqual(self._call_fut(expiration_other), cet_seconds) def test_w_timedelta_seconds(self): import datetime @@ -204,7 +204,7 @@ def test_w_timedelta_seconds(self): expiration_as_delta = datetime.timedelta(seconds=10) with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._callFUT(expiration_as_delta) + result = self._call_fut(expiration_as_delta) self.assertEqual(result, utc_seconds + 10) @@ -218,7 +218,7 @@ def test_w_timedelta_days(self): expiration_as_delta = datetime.timedelta(days=1) with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._callFUT(expiration_as_delta) + result = self._call_fut(expiration_as_delta) self.assertEqual(result, utc_seconds + 86400) diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index b1c3a29546b8..36cdc3f3e360 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -49,7 +49,7 @@ def test_ctor_explicit(self): class Test_make_exception(unittest.TestCase): - def _callFUT(self, response, content, error_info=None, use_json=True): + def _call_fut(self, response, content, error_info=None, use_json=True): from google.cloud.exceptions import make_exception return make_exception(response, content, error_info=error_info, use_json=use_json) @@ -58,7 +58,7 @@ def test_hit_w_content_as_str(self): from google.cloud.exceptions import NotFound response = _Response(404) content = b'{"error": {"message": "Not Found"}}' - exception = self._callFUT(response, content) + exception = self._call_fut(response, content) self.assertIsInstance(exception, NotFound) self.assertEqual(exception.message, 'Not Found') 
self.assertEqual(list(exception.errors), []) @@ -73,7 +73,7 @@ def test_hit_w_content_as_unicode(self): response = _Response(404) content = u'{"error": {"message": "%s" }}' % (error_message,) - exception = self._callFUT(response, content) + exception = self._call_fut(response, content) if six.PY2: self.assertEqual(str(exception), _to_bytes(expected, encoding='utf-8')) @@ -94,7 +94,7 @@ def test_hit_w_content_as_unicode_as_py3(self): with _Monkey(six, PY2=False): response = _Response(404) content = u'{"error": {"message": "%s" }}' % (error_message,) - exception = self._callFUT(response, content) + exception = self._call_fut(response, content) self.assertIsInstance(exception, NotFound) self.assertEqual(exception.message, error_message) @@ -112,7 +112,7 @@ def test_miss_w_content_as_dict(self): } response = _Response(600) content = {"error": {"message": "Unknown Error", "errors": [ERROR]}} - exception = self._callFUT(response, content) + exception = self._call_fut(response, content) self.assertIsInstance(exception, GoogleCloudError) self.assertEqual(exception.message, 'Unknown Error') self.assertEqual(list(exception.errors), [ERROR]) @@ -121,7 +121,7 @@ def test_html_when_json_expected(self): from google.cloud.exceptions import NotFound response = _Response(NotFound.code) content = '404 Not Found' - exception = self._callFUT(response, content, use_json=True) + exception = self._call_fut(response, content, use_json=True) self.assertIsInstance(exception, NotFound) self.assertEqual(exception.message, content) self.assertEqual(list(exception.errors), []) @@ -131,7 +131,7 @@ def test_without_use_json(self): content = u'error-content' response = _Response(TooManyRequests.code) - exception = self._callFUT(response, content, use_json=False) + exception = self._call_fut(response, content, use_json=False) self.assertIsInstance(exception, TooManyRequests) self.assertEqual(exception.message, content) diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 82e5ff1c9e73..c01157fcac0f 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -17,12 +17,12 @@ class Test__do_nothing_page_start(unittest.TestCase): - def _callFUT(self, iterator, page, response): + def _call_fut(self, iterator, page, response): from google.cloud.iterator import _do_nothing_page_start return _do_nothing_page_start(iterator, page, response) def test_do_nothing(self): - result = self._callFUT(None, None, None) + result = self._call_fut(None, None, None) self.assertIsNone(result) diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 6ce2bac3cfde..2f59f4a5729b 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -17,7 +17,7 @@ class Test__compute_type_url(unittest.TestCase): - def _callFUT(self, klass, prefix=None): + def _call_fut(self, klass, prefix=None): from google.cloud.operation import _compute_type_url if prefix is None: return _compute_type_url(klass) @@ -27,7 +27,7 @@ def test_wo_prefix(self): from google.protobuf.struct_pb2 import Struct from google.cloud.operation import _GOOGLE_APIS_PREFIX - type_url = self._callFUT(Struct) + type_url = self._call_fut(Struct) self.assertEqual( type_url, @@ -37,7 +37,7 @@ def test_w_prefix(self): from google.protobuf.struct_pb2 import Struct PREFIX = 'test.google-cloud-python.com' - 
type_url = self._callFUT(Struct, PREFIX) + type_url = self._call_fut(Struct, PREFIX) self.assertEqual( type_url, @@ -46,7 +46,7 @@ def test_w_prefix(self): class Test_register_type(unittest.TestCase): - def _callFUT(self, klass, type_url=None): + def _call_fut(self, klass, type_url=None): from google.cloud.operation import register_type register_type(klass, type_url=type_url) @@ -59,7 +59,7 @@ def test_explicit(self): type_url_map = {} with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): - self._callFUT(klass, type_url) + self._call_fut(klass, type_url) self.assertEqual(type_url_map, {type_url: klass}) @@ -70,7 +70,7 @@ def test_default(self): type_url_map = {} with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): - self._callFUT(Struct) + self._call_fut(Struct) type_url = MUT._compute_type_url(Struct) self.assertEqual(type_url_map, {type_url: Struct}) @@ -84,7 +84,7 @@ def test_w_same_class(self): type_url_map = {type_url: klass} with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): - self._callFUT(klass, type_url) + self._call_fut(klass, type_url) self.assertEqual(type_url_map, {type_url: klass}) @@ -98,7 +98,7 @@ def test_w_conflict(self): with _Monkey(MUT, _TYPE_URL_MAP=type_url_map): with self.assertRaises(ValueError): - self._callFUT(klass, type_url) + self._call_fut(klass, type_url) self.assertEqual(type_url_map, {type_url: other}) From fb6e5c8e5ec11d8fe80c49f8e2d2b49c992198c6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 21:02:17 -0800 Subject: [PATCH 089/468] Manually fixing up bad indents / long lines after renames. --- .../unit_tests/streaming/test_transfer.py | 10 +++++----- .../unit_tests/test__helpers.py | 2 +- .../google-cloud-core/unit_tests/test_client.py | 5 +++-- .../unit_tests/test_credentials.py | 10 +++++----- .../unit_tests/test_iterator.py | 16 ++++++++-------- 5 files changed, 22 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index 9f2fb6764cec..d3074f728e24 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -46,11 +46,11 @@ def test_ctor_explicit(self): CHUNK_SIZE = 1 << 18 NUM_RETRIES = 8 xfer = self._make_one(stream, - close_stream=True, - chunksize=CHUNK_SIZE, - auto_transfer=False, - http=HTTP, - num_retries=NUM_RETRIES) + close_stream=True, + chunksize=CHUNK_SIZE, + auto_transfer=False, + http=HTTP, + num_retries=NUM_RETRIES) self.assertIs(xfer.stream, stream) self.assertTrue(xfer.close_stream) self.assertEqual(xfer.chunksize, CHUNK_SIZE) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 0669193fc657..9430caf19967 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -1038,7 +1038,7 @@ def mock_channel(*args): host = 'localhost' with _Monkey(MUT, make_secure_channel=mock_channel): stub = self._call_fut(credentials, user_agent, - stub_class, host) + stub_class, host) self.assertIs(stub, result) self.assertEqual(channels, [channel_obj]) diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index d7638dff2330..3ae51b9e82cd 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -180,7 +180,8 @@ def test_ctor_w_invalid_project(self): CREDENTIALS 
= object() HTTP = object() with self.assertRaises(ValueError): - self._make_one(project=object(), credentials=CREDENTIALS, http=HTTP) + self._make_one(project=object(), credentials=CREDENTIALS, + http=HTTP) def _explicit_ctor_helper(self, project): import six @@ -189,7 +190,7 @@ def _explicit_ctor_helper(self, project): HTTP = object() client_obj = self._make_one(project=project, credentials=CREDENTIALS, - http=HTTP) + http=HTTP) if isinstance(project, six.binary_type): self.assertEqual(client_obj.project, project.decode('utf-8')) diff --git a/packages/google-cloud-core/unit_tests/test_credentials.py b/packages/google-cloud-core/unit_tests/test_credentials.py index 3da65b928cd5..ef583b35d98a 100644 --- a/packages/google-cloud-core/unit_tests/test_credentials.py +++ b/packages/google-cloud-core/unit_tests/test_credentials.py @@ -62,10 +62,10 @@ def _get_signed_query_params(*args): with _Monkey(MUT, _get_signed_query_params=_get_signed_query_params): url = self._call_fut(CREDENTIALS, RESOURCE, 1000, - api_access_endpoint=ENDPOINT, - response_type=response_type, - response_disposition=response_disposition, - generation=generation) + api_access_endpoint=ENDPOINT, + response_type=response_type, + response_disposition=response_disposition, + generation=generation) scheme, netloc, path, qs, frag = urlsplit(url) self.assertEqual(scheme, 'http') @@ -130,7 +130,7 @@ def test_it(self): EXPIRATION = 100 STRING_TO_SIGN = 'dummy_signature' result = self._call_fut(CREDENTIALS, EXPIRATION, - STRING_TO_SIGN) + STRING_TO_SIGN) self.assertEqual(result, { 'GoogleAccessId': ACCOUNT_NAME, diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index c01157fcac0f..b66deae9fdc8 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -74,7 +74,7 @@ def item_to_value(self, item): parent = Parent() page = self._make_one(parent, (10, 11, 12), - Parent.item_to_value) + Parent.item_to_value) page._remaining = 100 self.assertEqual(parent.calls, 0) @@ -107,7 +107,7 @@ def test_constructor(self): token = 'ab13nceor03' max_results = 1337 iterator = self._make_one(client, item_to_value, page_token=token, - max_results=max_results) + max_results=max_results) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) @@ -313,7 +313,7 @@ def item_to_value(iterator, item): # pylint: disable=unused-argument {'items': [{'name': key1}, {'name': key2}]}) client = _Client(connection) iterator = self._make_one(client, path=path, - item_to_value=item_to_value) + item_to_value=item_to_value) self.assertEqual(iterator.num_results, 0) items_iter = iter(iterator) @@ -360,7 +360,7 @@ def test__has_next_page_w_number_w_token(self): def test__has_next_page_w_max_results_not_done(self): iterator = self._make_one(None, None, None, max_results=3, - page_token='definitely-not-none') + page_token='definitely-not-none') iterator.page_number = 1 self.assertLess(iterator.num_results, iterator.max_results) self.assertTrue(iterator._has_next_page()) @@ -394,7 +394,7 @@ def test__get_query_params_w_max_results(self): path = '/foo' max_results = 3 iterator = self._make_one(client, path, None, - max_results=max_results) + max_results=max_results) iterator.num_results = 1 local_max = max_results - iterator.num_results self.assertEqual(iterator._get_query_params(), @@ -406,7 +406,7 @@ def test__get_query_params_extra_params(self): path = '/foo' extra_params = {'key': 'val'} iterator = 
self._make_one(client, path, None, - extra_params=extra_params) + extra_params=extra_params) self.assertEqual(iterator._get_query_params(), extra_params) def test__get_query_params_w_token_and_extra_params(self): @@ -416,7 +416,7 @@ def test__get_query_params_w_token_and_extra_params(self): token = 'token' extra_params = {'key': 'val'} iterator = self._make_one(client, path, None, - extra_params=extra_params) + extra_params=extra_params) iterator.next_page_token = token expected_query = extra_params.copy() @@ -483,7 +483,7 @@ def test_constructor(self): item_to_value = object() max_results = 1337 iterator = self._make_one(client, page_iter, item_to_value, - max_results=max_results) + max_results=max_results) self.assertFalse(iterator._started) self.assertIs(iterator.client, client) From 78de627e78fdaac34ded9f8a03d5e6dc35771bba Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 17:28:07 -0800 Subject: [PATCH 090/468] Renaming core connection module to _http. --- .../google-cloud-core/google/cloud/{connection.py => _http.py} | 0 .../unit_tests/{test_connection.py => test__http.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename packages/google-cloud-core/google/cloud/{connection.py => _http.py} (100%) rename packages/google-cloud-core/unit_tests/{test_connection.py => test__http.py} (100%) diff --git a/packages/google-cloud-core/google/cloud/connection.py b/packages/google-cloud-core/google/cloud/_http.py similarity index 100% rename from packages/google-cloud-core/google/cloud/connection.py rename to packages/google-cloud-core/google/cloud/_http.py diff --git a/packages/google-cloud-core/unit_tests/test_connection.py b/packages/google-cloud-core/unit_tests/test__http.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_connection.py rename to packages/google-cloud-core/unit_tests/test__http.py From f078e17bb4183415fdf1712e5ac0eb1a96e3d9b6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 17:35:16 -0800 Subject: [PATCH 091/468] Making the connection property non-public in datastore. 
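
The practical effect for callers: the connection object now hangs off the
private _connection attribute rather than the public connection property.
A minimal before/after sketch (the client setup is assumed for
illustration; only the api_request call pattern comes from this diff):

    # before this patch: the connection was exposed publicly
    response = client.connection.api_request(method='GET', path='/foo')

    # after this patch: library code reaches the connection privately,
    # and the attribute is no longer part of the supported surface
    response = client._connection.api_request(method='GET', path='/foo')
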
--- .../google-cloud-core/google/cloud/client.py | 4 ++-- .../google/cloud/iterator.py | 4 ++-- .../google/cloud/operation.py | 4 ++-- .../unit_tests/test__http.py | 6 +++-- .../unit_tests/test_client.py | 24 +++++++++---------- .../unit_tests/test_iterator.py | 2 +- .../unit_tests/test_operation.py | 2 +- 7 files changed, 24 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 893cde192910..521fa11e8e2b 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -18,7 +18,7 @@ import six from google.cloud._helpers import _determine_default_project -from google.cloud.connection import Connection +from google.cloud._http import Connection from google.cloud.credentials import get_credentials @@ -120,7 +120,7 @@ class Client(_ClientFactoryMixin): def __init__(self, credentials=None, http=None): if credentials is None and http is None: credentials = get_credentials() - self.connection = self._connection_class( + self._connection = self._connection_class( credentials=credentials, http=http) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 5f774aa4a846..2d0a93689d32 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -382,12 +382,12 @@ def _get_next_page_response(self): """ params = self._get_query_params() if self._HTTP_METHOD == 'GET': - return self.client.connection.api_request( + return self.client._connection.api_request( method=self._HTTP_METHOD, path=self.path, query_params=params) elif self._HTTP_METHOD == 'POST': - return self.client.connection.api_request( + return self.client._connection.api_request( method=self._HTTP_METHOD, path=self.path, data=params) diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 388e45dcbb94..8bc848e7facb 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -104,7 +104,7 @@ class Operation(object): :type client: :class:`~google.cloud.client.Client` :param client: The client used to poll for the status of the operation. If the operation was created via JSON/HTTP, the client - must own a :class:`~google.cloud.connection.Connection` + must own a :class:`~google.cloud._http.Connection` to send polling requests. If created via protobuf, the client must have a gRPC stub in the ``_operations_stub`` attribute. @@ -218,7 +218,7 @@ def _get_operation_http(self): :returns: The latest status of the current operation. 
""" path = 'operations/%s' % (self.name,) - api_response = self.client.connection.api_request( + api_response = self.client._connection.api_request( method='GET', path=path) return json_format.ParseDict( api_response, operations_pb2.Operation()) diff --git a/packages/google-cloud-core/unit_tests/test__http.py b/packages/google-cloud-core/unit_tests/test__http.py index e2eaa7e267aa..72d79a707aac 100644 --- a/packages/google-cloud-core/unit_tests/test__http.py +++ b/packages/google-cloud-core/unit_tests/test__http.py @@ -19,7 +19,8 @@ class TestConnection(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.connection import Connection + from google.cloud._http import Connection + return Connection def _make_one(self, *args, **kw): @@ -111,7 +112,8 @@ class TestJSONConnection(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.connection import JSONConnection + from google.cloud._http import JSONConnection + return JSONConnection def _make_one(self, *args, **kw): diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index 3ae51b9e82cd..e7fe5c03be12 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -60,8 +60,8 @@ def mock_get_credentials(): with _Monkey(client, get_credentials=mock_get_credentials): client_obj = self._make_one() - self.assertIsInstance(client_obj.connection, _MockConnection) - self.assertIs(client_obj.connection.credentials, CREDENTIALS) + self.assertIsInstance(client_obj._connection, _MockConnection) + self.assertIs(client_obj._connection.credentials, CREDENTIALS) self.assertEqual(FUNC_CALLS, ['get_credentials']) def test_ctor_explicit(self): @@ -69,9 +69,9 @@ def test_ctor_explicit(self): HTTP = object() client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) - self.assertIsInstance(client_obj.connection, _MockConnection) - self.assertIs(client_obj.connection.credentials, CREDENTIALS) - self.assertIs(client_obj.connection.http, HTTP) + self.assertIsInstance(client_obj._connection, _MockConnection) + self.assertIs(client_obj._connection.credentials, CREDENTIALS) + self.assertIs(client_obj._connection.http, HTTP) def test_from_service_account_json(self): from google.cloud._testing import _Monkey @@ -83,7 +83,7 @@ def test_from_service_account_json(self): with _Monkey(client, ServiceAccountCredentials=mock_creds): client_obj = KLASS.from_service_account_json(MOCK_FILENAME) - self.assertIs(client_obj.connection.credentials, mock_creds._result) + self.assertIs(client_obj._connection.credentials, mock_creds._result) self.assertEqual(mock_creds.json_called, [MOCK_FILENAME]) def test_from_service_account_json_fail(self): @@ -104,7 +104,7 @@ def test_from_service_account_p12(self): client_obj = KLASS.from_service_account_p12(CLIENT_EMAIL, MOCK_FILENAME) - self.assertIs(client_obj.connection.credentials, mock_creds._result) + self.assertIs(client_obj._connection.credentials, mock_creds._result) self.assertEqual(mock_creds.p12_called, [(CLIENT_EMAIL, MOCK_FILENAME)]) @@ -155,8 +155,8 @@ def mock_get_credentials(): client_obj = self._make_one() self.assertEqual(client_obj.project, PROJECT) - self.assertIsInstance(client_obj.connection, _MockConnection) - self.assertIs(client_obj.connection.credentials, CREDENTIALS) + self.assertIsInstance(client_obj._connection, _MockConnection) + self.assertIs(client_obj._connection.credentials, CREDENTIALS) self.assertEqual( FUNC_CALLS, [(None, 
'_determine_default_project'), 'get_credentials']) @@ -196,9 +196,9 @@ def _explicit_ctor_helper(self, project): self.assertEqual(client_obj.project, project.decode('utf-8')) else: self.assertEqual(client_obj.project, project) - self.assertIsInstance(client_obj.connection, _MockConnection) - self.assertIs(client_obj.connection.credentials, CREDENTIALS) - self.assertIs(client_obj.connection.http, HTTP) + self.assertIsInstance(client_obj._connection, _MockConnection) + self.assertIs(client_obj._connection.credentials, CREDENTIALS) + self.assertIs(client_obj._connection.http, HTTP) def test_ctor_explicit_bytes(self): PROJECT = b'PROJECT' diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index b66deae9fdc8..7f10ea47f104 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -591,7 +591,7 @@ def api_request(self, **kw): class _Client(object): def __init__(self, connection): - self.connection = connection + self._connection = connection class SimpleIter(object): diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 2f59f4a5729b..41c469ba336d 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -419,4 +419,4 @@ class _Client(object): def __init__(self, connection=None): self._operations_stub = _OperationsStub() - self.connection = connection + self._connection = connection From 48ecfd14312bce9aff9084aea99e57c69acb68e9 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Mon, 14 Nov 2016 12:24:45 -0500 Subject: [PATCH 092/468] Pass headers for streaming download. Fixes #2715 --- .../google/cloud/streaming/transfer.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py index 410aa9430bae..3d6d5b8e6016 100644 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ b/packages/google-cloud-core/google/cloud/streaming/transfer.py @@ -387,7 +387,7 @@ def initialize_download(self, http_request, http): # Unless the user has requested otherwise, we want to just # go ahead and pump the bytes now. if self.auto_transfer: - self.stream_file(use_chunks=True) + self.stream_file(use_chunks=True, headers=http_request.headers) def _normalize_start_end(self, start, end=None): """Validate / fix up byte range. @@ -487,7 +487,7 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): return end_byte - def _get_chunk(self, start, end): + def _get_chunk(self, start, end, headers=None): """Retrieve a chunk of the file. :type start: int @@ -496,11 +496,14 @@ def _get_chunk(self, start, end): :type end: int :param end: (Optional) end byte of the range. + :type headers: dict + :param headers: (Optional) Headers to be used for the ``Request``. + :rtype: :class:`google.cloud.streaming.http_wrapper.Response` :returns: response from the chunk request. 
""" self._ensure_initialized() - request = Request(url=self.url) + request = Request(url=self.url, headers=headers) self._set_range_header(request, start, end=end) return make_api_request( self.bytes_http, request, retries=self.num_retries) @@ -589,7 +592,7 @@ def get_range(self, start, end=None, use_chunks=True): raise TransferRetryError( 'Zero bytes unexpectedly returned in download response') - def stream_file(self, use_chunks=True): + def stream_file(self, use_chunks=True, headers=None): """Stream the entire download. Writes retrieved bytes into :attr:`stream`. @@ -598,6 +601,9 @@ def stream_file(self, use_chunks=True): :param use_chunks: If False, ignore :attr:`chunksize` and stream this download in a single request. If True, streams via chunks. + + :type headers: dict + :param headers: (Optional) Headers to be used for the ``Request``. """ self._ensure_initialized() while True: @@ -607,7 +613,8 @@ def stream_file(self, use_chunks=True): else: end_byte = self._compute_end_byte(self.progress, use_chunks=use_chunks) - response = self._get_chunk(self.progress, end_byte) + response = self._get_chunk(self.progress, end_byte, + headers=headers) if self.total_size is None: self._set_total(response.info) response = self._process_response(response) From a691873033553feff1c6cbc3cbdb2b1e3553abc4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 12:44:19 -0800 Subject: [PATCH 093/468] Upgrading core to version to 0.21.0. As a result, also upgrading the umbrella package and all packages to 0.21.0 (since they all depend on core). --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index b59d4f5d2b3c..5d2f0d1fef7f 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-core', - version='0.20.0', + version='0.21.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 8899fb415a5f41861c3a01acb8c40c503519f3aa Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 20 Oct 2016 12:32:12 -0400 Subject: [PATCH 094/468] Add GatewayTimeout exception to pubsub subscription pull. --- packages/google-cloud-core/google/cloud/_testing.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index 49eb35ad50ae..880536f8aa45 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -77,6 +77,10 @@ def _make_grpc_failed_precondition(self): from grpc import StatusCode return self._make_grpc_error(StatusCode.FAILED_PRECONDITION) + def _make_grpc_deadline_exceeded(self): + from grpc import StatusCode + return self._make_grpc_error(StatusCode.DEADLINE_EXCEEDED) + class _GAXPageIterator(object): From 2c92af11a981aed5d35eaebc152fbb9ab2b0cf72 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 2 Dec 2016 15:02:25 -0800 Subject: [PATCH 095/468] Switch from oauth2client to google-auth (#2726) * Removes all use of oauth2client from every package and tests. * Updates core to use google-auth's default credentials, project ID, and scoping logic. * Updates bigtable to use google-auth's scoping logic. 
--- .../google/cloud/_helpers.py | 203 +------- .../google-cloud-core/google/cloud/_http.py | 39 +- .../google-cloud-core/google/cloud/client.py | 43 +- .../google/cloud/credentials.py | 75 +-- .../google/cloud/environment_vars.py | 6 - .../google/cloud/streaming/http_wrapper.py | 2 +- packages/google-cloud-core/setup.py | 3 +- packages/google-cloud-core/tox.ini | 127 +---- .../unit_tests/test__helpers.py | 449 +----------------- .../unit_tests/test__http.py | 95 +--- .../unit_tests/test_client.py | 65 +-- .../unit_tests/test_credentials.py | 70 +-- 12 files changed, 133 insertions(+), 1044 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 33d9161cbf58..9b4ec5736cb0 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -22,27 +22,25 @@ import calendar import datetime -import json import os import re -import socket from threading import local as Local +import google.auth from google.protobuf import timestamp_pb2 -try: - from google.appengine.api import app_identity -except ImportError: - app_identity = None +import google_auth_httplib2 + try: import grpc -except ImportError: # pragma: NO COVER + from google.auth.transport.grpc import ( + AuthMetadataPlugin) # pragma: NO COVER +except ImportError: grpc = None + AuthMetadataPlugin = None + +import httplib2 import six from six.moves import http_client -from six.moves import configparser - -from google.cloud.environment_vars import PROJECT -from google.cloud.environment_vars import CREDENTIALS _NOW = datetime.datetime.utcnow # To be replaced by tests. @@ -168,139 +166,11 @@ def _ensure_tuple_or_list(arg_name, tuple_or_list): return list(tuple_or_list) -def _app_engine_id(): - """Gets the App Engine application ID if it can be inferred. - - :rtype: str or ``NoneType`` - :returns: App Engine application ID if running in App Engine, - else ``None``. - """ - if app_identity is None: - return None - - return app_identity.get_application_id() - - -def _file_project_id(): - """Gets the project ID from the credentials file if one is available. - - :rtype: str or ``NoneType`` - :returns: Project ID from JSON credentials file if value exists, - else ``None``. - """ - credentials_file_path = os.getenv(CREDENTIALS) - if credentials_file_path: - with open(credentials_file_path, 'rb') as credentials_file: - credentials_json = credentials_file.read() - credentials = json.loads(credentials_json.decode('utf-8')) - return credentials.get('project_id') - - -def _get_nix_config_path(): - """Get the ``gcloud`` CLI config path on *nix systems. - - :rtype: str - :returns: The filename on a *nix system containing the CLI - config file. - """ - return os.path.join(_USER_ROOT, '.config', _GCLOUD_CONFIG_FILE) - - -def _get_windows_config_path(): - """Get the ``gcloud`` CLI config path on Windows systems. - - :rtype: str - :returns: The filename on a Windows system containing the CLI - config file. - """ - appdata_dir = os.getenv('APPDATA', '') - return os.path.join(appdata_dir, _GCLOUD_CONFIG_FILE) - - -def _default_service_project_id(): - """Retrieves the project ID from the gcloud command line tool. - - This assumes the ``.config`` directory is stored - - in ~/.config on *nix systems - - in the %APPDATA% directory on Windows systems - - Additionally, the ${HOME} / "~" directory may not be present on Google - App Engine, so this may be conditionally ignored. 
- - Files that cannot be opened with configparser are silently ignored; this is - designed so that you can specify a list of potential configuration file - locations. - - :rtype: str or ``NoneType`` - :returns: Project-ID from default configuration file else ``None`` - """ - search_paths = [] - if _USER_ROOT is not None: - search_paths.append(_get_nix_config_path()) - - if os.name == 'nt': - search_paths.append(_get_windows_config_path()) - - config = configparser.RawConfigParser() - config.read(search_paths) - - if config.has_section(_GCLOUD_CONFIG_SECTION): - try: - return config.get(_GCLOUD_CONFIG_SECTION, _GCLOUD_CONFIG_KEY) - except configparser.NoOptionError: - return None - - -def _compute_engine_id(): - """Gets the Compute Engine project ID if it can be inferred. - - Uses 169.254.169.254 for the metadata server to avoid request - latency from DNS lookup. - - See https://cloud.google.com/compute/docs/metadata#metadataserver - for information about this IP address. (This IP is also used for - Amazon EC2 instances, so the metadata flavor is crucial.) - - See https://github.com/google/oauth2client/issues/93 for context about - DNS latency. - - :rtype: str or ``NoneType`` - :returns: Compute Engine project ID if the metadata service is available, - else ``None``. - """ - host = '169.254.169.254' - uri_path = '/computeMetadata/v1/project/project-id' - headers = {'Metadata-Flavor': 'Google'} - connection = http_client.HTTPConnection(host, timeout=0.1) - - try: - connection.request('GET', uri_path, headers=headers) - response = connection.getresponse() - if response.status == 200: - return response.read() - except socket.error: # socket.timeout or socket.error(64, 'Host is down') - pass - finally: - connection.close() - - -def _get_production_project(): - """Gets the production project if it can be inferred.""" - return os.getenv(PROJECT) - - def _determine_default_project(project=None): """Determine default project ID explicitly or implicitly as fall-back. - In implicit case, supports three environments. In order of precedence, the - implicit environments are: - - * GOOGLE_CLOUD_PROJECT environment variable - * GOOGLE_APPLICATION_CREDENTIALS JSON file - * Get default service project from - ``$ gcloud beta auth application-default login`` - * Google App Engine application ID - * Google Compute Engine project ID (from metadata server) + See :func:`google.auth.default` for details on how the default project + is determined. :type project: str :param project: Optional. The project name to use as default. @@ -309,20 +179,7 @@ def _determine_default_project(project=None): :returns: Default project if it can be determined. """ if project is None: - project = _get_production_project() - - if project is None: - project = _file_project_id() - - if project is None: - project = _default_service_project_id() - - if project is None: - project = _app_engine_id() - - if project is None: - project = _compute_engine_id() - + _, project = google.auth.default() return project @@ -597,40 +454,12 @@ def _name_from_project_path(path, project, template): return match.group('name') -class MetadataPlugin(object): - """Callable class to transform metadata for gRPC requests. - - :type credentials: :class:`oauth2client.client.OAuth2Credentials` - :param credentials: The OAuth2 Credentials to use for creating - access tokens. - """ - - def __init__(self, credentials): - self._credentials = credentials - - def __call__(self, unused_context, callback): - """Adds authorization header to request metadata. 
- - :type unused_context: object - :param unused_context: A gRPC context which is not needed - to modify headers. - - :type callback: callable - :param callback: A callback which will use the headers. - """ - access_token = self._credentials.get_access_token().access_token - headers = [ - ('authorization', 'Bearer ' + access_token), - ] - callback(headers, None) - - def make_secure_channel(credentials, user_agent, host): """Makes a secure channel for an RPC service. Uses / depends on gRPC. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :type credentials: :class:`google.auth.credentials.Credentials` :param credentials: The OAuth2 Credentials to use for creating access tokens. @@ -646,7 +475,9 @@ def make_secure_channel(credentials, user_agent, host): # ssl_channel_credentials() loads root certificates from # `grpc/_adapter/credentials/roots.pem`. transport_creds = grpc.ssl_channel_credentials() - custom_metadata_plugin = MetadataPlugin(credentials) + http = httplib2.Http() + custom_metadata_plugin = AuthMetadataPlugin( + credentials, google_auth_httplib2.Request(http=http)) auth_creds = grpc.metadata_call_credentials( custom_metadata_plugin, name='google_creds') channel_creds = grpc.composite_channel_credentials( @@ -664,7 +495,7 @@ def make_secure_stub(credentials, user_agent, stub_class, host): Uses / depends on gRPC. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :type credentials: :class:`google.auth.credentials.Credentials` :param credentials: The OAuth2 Credentials to use for creating access tokens. diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index 03e11449c558..c68958e356a6 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -19,6 +19,8 @@ import six from six.moves.urllib.parse import urlencode +import google.auth.credentials +import google_auth_httplib2 import httplib2 from google.cloud.exceptions import make_exception @@ -59,9 +61,9 @@ class Connection(object): object will also need to be able to add a bearer token to API requests and handle token refresh on 401 errors. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for this connection. + :param credentials: The credentials to use for this connection. :type http: :class:`httplib2.Http` or class that defines ``request()``. :param http: An optional HTTP object to make requests. @@ -77,14 +79,14 @@ class Connection(object): def __init__(self, credentials=None, http=None): self._http = http - self._credentials = self._create_scoped_credentials( + self._credentials = google.auth.credentials.with_scopes_if_required( credentials, self.SCOPE) @property def credentials(self): """Getter for current credentials. - :rtype: :class:`oauth2client.client.OAuth2Credentials` or + :rtype: :class:`google.auth.credentials.Credentials` or :class:`NoneType` :returns: The credentials object associated with this connection. """ @@ -98,34 +100,13 @@ def http(self): :returns: A Http object used to transport data. 
""" if self._http is None: - self._http = httplib2.Http() if self._credentials: - self._http = self._credentials.authorize(self._http) + self._http = google_auth_httplib2.AuthorizedHttp( + self._credentials) + else: + self._http = httplib2.Http() return self._http - @staticmethod - def _create_scoped_credentials(credentials, scope): - """Create a scoped set of credentials if it is required. - - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to add a scope to. - - :type scope: list of URLs - :param scope: the effective service auth scopes for the connection. - - :rtype: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :returns: A new credentials object that has a scope added (if needed). - """ - if credentials: - try: - if credentials.create_scoped_required(): - credentials = credentials.create_scoped(scope) - except AttributeError: - pass - return credentials - class JSONConnection(Connection): """A connection to a Google JSON-based API. diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 521fa11e8e2b..a63c614888ef 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -14,7 +14,7 @@ """Base classes for client used to interact with Google Cloud APIs.""" -from oauth2client.service_account import ServiceAccountCredentials +from google.oauth2 import service_account import six from google.cloud._helpers import _determine_default_project @@ -55,46 +55,11 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): """ if 'credentials' in kwargs: raise TypeError('credentials must not be in keyword arguments') - credentials = ServiceAccountCredentials.from_json_keyfile_name( + credentials = service_account.Credentials.from_service_account_file( json_credentials_path) kwargs['credentials'] = credentials return cls(*args, **kwargs) - @classmethod - def from_service_account_p12(cls, client_email, private_key_path, - *args, **kwargs): - """Factory to retrieve P12 credentials while creating client. - - .. note:: - Unless you have an explicit reason to use a PKCS12 key for your - service account, we recommend using a JSON key. - - :type client_email: str - :param client_email: The e-mail attached to the service account. - - :type private_key_path: str - :param private_key_path: The path to a private key file (this file was - given to you when you created the service - account). This file must be in P12 format. - - :type args: tuple - :param args: Remaining positional arguments to pass to constructor. - - :type kwargs: dict - :param kwargs: Remaining keyword arguments to pass to constructor. - - :rtype: :class:`google.cloud.client.Client` - :returns: The client created with the retrieved P12 credentials. - :raises: :class:`TypeError` if there is a conflict with the kwargs - and the credentials created by the factory. - """ - if 'credentials' in kwargs: - raise TypeError('credentials must not be in keyword arguments') - credentials = ServiceAccountCredentials.from_p12_keyfile( - client_email, private_key_path) - kwargs['credentials'] = credentials - return cls(*args, **kwargs) - class Client(_ClientFactoryMixin): """Client to bundle configuration needed for API requests. @@ -102,7 +67,7 @@ class Client(_ClientFactoryMixin): Assumes that the associated ``_connection_class`` only accepts ``http`` and ``credentials`` in its constructor. 
- :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` :param credentials: The OAuth2 Credentials to use for the connection owned by this client. If not passed (and if no ``http`` @@ -165,7 +130,7 @@ class JSONClient(Client, _ClientProjectMixin): passed falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` :param credentials: The OAuth2 Credentials to use for the connection owned by this client. If not passed (and if no ``http`` diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index 8b1bc8d14bf3..52cba9b22fcc 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -19,7 +19,8 @@ import six from six.moves.urllib.parse import urlencode -from oauth2client import client +import google.auth +import google.auth.credentials from google.cloud._helpers import UTC from google.cloud._helpers import _NOW @@ -29,68 +30,20 @@ def get_credentials(): """Gets credentials implicitly from the current environment. - .. note:: + Uses :func:`google.auth.default()`. - You should not need to use this function directly. Instead, use a - helper method which uses this method under the hood. - - Checks environment in order of precedence: - - * Google App Engine (production and testing) - * Environment variable :envvar:`GOOGLE_APPLICATION_CREDENTIALS` pointing to - a file with stored credentials information. - * Stored "well known" file associated with ``gcloud`` command line tool. - * Google Compute Engine production environment. - - The file referred to in :envvar:`GOOGLE_APPLICATION_CREDENTIALS` is - expected to contain information about credentials that are ready to use. - This means either service account information or user account information - with a ready-to-use refresh token: - - .. code:: json - - { - 'type': 'authorized_user', - 'client_id': '...', - 'client_secret': '...', - 'refresh_token': '...' - } - - or - - .. code:: json - - { - 'type': 'service_account', - 'project_id': '...', - 'private_key_id': '...', - 'private_key': '...', - 'client_email': '...', - 'client_id': '...', - 'auth_uri': '...', - 'token_uri': '...', - 'auth_provider_x509_cert_url': '...', - 'client_x509_cert_url': '...' - } - - The second of these is simply a JSON key downloaded from the Google APIs - console. The first is a close cousin of the "client secrets" JSON file - used by :mod:`oauth2client.clientsecrets` but differs in formatting. - - :rtype: :class:`oauth2client.client.GoogleCredentials`, - :class:`oauth2client.contrib.appengine.AppAssertionCredentials`, - :class:`oauth2client.contrib.gce.AppAssertionCredentials`, - :class:`oauth2client.service_account.ServiceAccountCredentials` + :rtype: :class:`google.auth.credentials.Credentials`, :returns: A new credentials instance corresponding to the implicit environment. """ - return client.GoogleCredentials.get_application_default() + credentials, _ = google.auth.default() + return credentials def _get_signed_query_params(credentials, expiration, string_to_sign): """Gets query parameters for creating a signed URL. 
- :type credentials: :class:`oauth2client.client.AssertionCredentials` + :type credentials: :class:`google.auth.credentials.Signer` :param credentials: The credentials used to create a private key for signing text. @@ -106,7 +59,7 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): :returns: Query parameters matching the signing credentials with a signed payload. """ - if not hasattr(credentials, 'sign_blob'): + if not isinstance(credentials, google.auth.credentials.Signing): auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' 'google-cloud-auth.html#setting-up-a-service-account') raise AttributeError('you need a private key to sign credentials.' @@ -114,9 +67,9 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): 'just contains a token. see %s for more ' 'details.' % (type(credentials), auth_uri)) - _, signature_bytes = credentials.sign_blob(string_to_sign) + signature_bytes = credentials.sign_bytes(string_to_sign) signature = base64.b64encode(signature_bytes) - service_account_name = credentials.service_account_email + service_account_name = credentials.signer_email return { 'GoogleAccessId': service_account_name, 'Expires': str(expiration), @@ -160,10 +113,8 @@ def generate_signed_url(credentials, resource, expiration, .. note:: - Assumes ``credentials`` implements a ``sign_blob()`` method that takes - bytes to sign and returns a pair of the key ID (unused here) and the - signed bytes (this is abstract in the base class - :class:`oauth2client.client.AssertionCredentials`). Also assumes + Assumes ``credentials`` implements the + :class:`google.auth.credentials.Signing` interface. Also assumes ``credentials`` has a ``service_account_email`` property which identifies the credentials. @@ -180,7 +131,7 @@ def generate_signed_url(credentials, resource, expiration, google-cloud-python/issues/922 .. _reference: https://cloud.google.com/storage/docs/reference-headers - :type credentials: :class:`oauth2client.appengine.AppAssertionCredentials` + :type credentials: :class:`google.auth.credentials.Signing` :param credentials: Credentials object with an associated private key to sign text. diff --git a/packages/google-cloud-core/google/cloud/environment_vars.py b/packages/google-cloud-core/google/cloud/environment_vars.py index d27eca8742d0..15e8ee1ce109 100644 --- a/packages/google-cloud-core/google/cloud/environment_vars.py +++ b/packages/google-cloud-core/google/cloud/environment_vars.py @@ -18,9 +18,6 @@ and tests. """ -PROJECT = 'GOOGLE_CLOUD_PROJECT' -"""Environment variable defining default project.""" - GCD_DATASET = 'DATASTORE_DATASET' """Environment variable defining default dataset ID under GCD.""" @@ -33,9 +30,6 @@ BIGTABLE_EMULATOR = 'BIGTABLE_EMULATOR_HOST' """Environment variable defining host for Bigtable emulator.""" -CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS' -"""Environment variable defining location of Google credentials.""" - DISABLE_GRPC = 'GOOGLE_CLOUD_DISABLE_GRPC' """Environment variable acting as flag to disable gRPC. 
diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py index 3f8d8355645d..e80e105175e7 100644 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py @@ -90,7 +90,7 @@ def _httplib2_debug_level(http_request, level, http=None): old_level = httplib2.debuglevel http_levels = {} httplib2.debuglevel = level - if http is not None: + if http is not None and getattr(http, 'connections', None) is not None: for connection_key, connection in http.connections.items(): # httplib2 stores two kinds of values in this dict, connection # classes and instances. Since the connection types are all diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 5d2f0d1fef7f..8e7085d27904 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -52,8 +52,9 @@ REQUIREMENTS = [ 'httplib2 >= 0.9.1', 'googleapis-common-protos >= 1.3.4', - 'oauth2client >= 3.0.0, < 4.0.0dev', 'protobuf >= 3.0.0', + 'google-auth >= 0.4.0, < 2.0.0dev', + 'google-auth-httplib2', 'six', ] diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini index 1e2eba979c5f..7c5ef9d29dde 100644 --- a/packages/google-cloud-core/tox.ini +++ b/packages/google-cloud-core/tox.ini @@ -1,10 +1,10 @@ [tox] envlist = - py27,py34,py35,cover,docs,lint + py27,py34,py35,cover [testing] deps = - {toxinidir}/core + mock pytest covercmd = py.test --quiet \ @@ -12,13 +12,6 @@ covercmd = --cov=unit_tests \ --cov-config {toxinidir}/.coveragerc \ unit_tests - py.test --quiet \ - --cov=google.cloud \ - --cov=unit_tests \ - --cov-append \ - --cov-config {toxinidir}/.coveragerc \ - core/unit_tests - coverage report --show-missing --fail-under=100 [testenv] commands = @@ -26,13 +19,6 @@ commands = deps = {[testing]deps} -[testenv:py27-pandas] -basepython = - python2.7 -deps = - {[testenv]deps} - pandas - [testenv:cover] basepython = python2.7 @@ -42,112 +28,3 @@ deps = {[testenv]deps} coverage pytest-cov - -[testenv:coveralls] -basepython = {[testenv:cover]basepython} -commands = - {[testing]covercmd} - coveralls -ignore_errors = True -deps = - {[testenv:cover]deps} - coveralls -passenv = {[testenv:system-tests]passenv} - -[testenv:json-docs] -basepython = - python2.7 -commands = - python -c \ - "import shutil; shutil.rmtree('docs/_build/json', ignore_errors=True)" - {toxinidir}/scripts/update_json_docs.sh -deps = - parinx - pdoc - Sphinx -passenv = - TRAVIS_TAG - TRAVIS_BRANCH - TRAVIS_PULL_REQUEST - GH_OWNER - GH_OAUTH_TOKEN - GH_PROJECT_NAME - -[testenv:docs] -basepython = - python2.7 -commands = - python -c \ - "import shutil; shutil.rmtree('docs/_build', ignore_errors=True)" - sphinx-build -W -b html -d docs/_build/doctrees docs docs/_build/html - python {toxinidir}/scripts/verify_included_modules.py --build-root _build -deps = - {[testenv]deps} - Sphinx - sphinx_rtd_theme -passenv = {[testenv:system-tests]passenv} SPHINX_RELEASE READTHEDOCS - -[pep8] -exclude = - docs/conf.py, - google/cloud/bigtable/_generated*/*, - google/cloud/datastore/_generated/* -verbose = 1 - -[testenv:lint] -basepython = - python2.7 -commands = - python {toxinidir}/scripts/pycodestyle_on_repo.py - python {toxinidir}/scripts/run_pylint.py -deps = - {[testenv]deps} - pycodestyle - pylint >= 1.6.4 -passenv = {[testenv:system-tests]passenv} - -[testenv:system-tests] -basepython = - python2.7 -commands = - python 
{toxinidir}/system_tests/attempt_system_tests.py {posargs} -passenv = GOOGLE_* GOOGLE_CLOUD_* TRAVIS* encrypted_* - -[testenv:system-tests3] -basepython = - python3.4 -commands = - python {toxinidir}/system_tests/attempt_system_tests.py {posargs} -passenv = {[testenv:system-tests]passenv} - -[emulator] -deps = - {[testenv]deps} - psutil -setenv = - GOOGLE_CLOUD_NO_PRINT=true -passenv = - GOOGLE_CLOUD_DISABLE_GRPC -emulatorcmd = - python {toxinidir}/system_tests/run_emulator.py - -[testenv:datastore-emulator] -commands = - {[emulator]emulatorcmd} --package=datastore -setenv = {[emulator]setenv} -passenv = {[emulator]passenv} -deps = {[emulator]deps} - -[testenv:pubsub-emulator] -commands = - {[emulator]emulatorcmd} --package=pubsub -setenv = {[emulator]setenv} -passenv = {[emulator]passenv} -deps = {[emulator]deps} - -[testenv:bigtable-emulator] -commands = - {[emulator]emulatorcmd} --package=bigtable -setenv = {[emulator]setenv} -passenv = {[emulator]passenv} -deps = {[emulator]deps} diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 9430caf19967..78391e56ef42 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os import unittest +import mock + class Test__LocalStack(unittest.TestCase): @@ -126,329 +127,27 @@ def test_invalid_iterable(self): self._call_fut('ARGNAME', invalid_tuple_or_list) -class Test__app_engine_id(unittest.TestCase): - - def _call_fut(self): - from google.cloud._helpers import _app_engine_id - return _app_engine_id() - - def test_no_value(self): - from google.cloud._testing import _Monkey - from google.cloud import _helpers - - with _Monkey(_helpers, app_identity=None): - dataset_id = self._call_fut() - self.assertIsNone(dataset_id) - - def test_value_set(self): - from google.cloud._testing import _Monkey - from google.cloud import _helpers - - APP_ENGINE_ID = object() - APP_IDENTITY = _AppIdentity(APP_ENGINE_ID) - with _Monkey(_helpers, app_identity=APP_IDENTITY): - dataset_id = self._call_fut() - self.assertEqual(dataset_id, APP_ENGINE_ID) - - -class Test__file_project_id(unittest.TestCase): - - def _call_fut(self): - from google.cloud._helpers import _file_project_id - return _file_project_id() - - def test_success(self): - from google.cloud.environment_vars import CREDENTIALS - from google.cloud._testing import _Monkey - from google.cloud._testing import _NamedTemporaryFile - - project_id = 'test-project-id' - payload = '{"%s":"%s"}' % ('project_id', project_id) - with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as creds_file: - creds_file.write(payload) - - environ = {CREDENTIALS: temp.name} - with _Monkey(os, getenv=environ.get): - result = self._call_fut() - - self.assertEqual(result, project_id) - - def test_no_environment_variable_set(self): - from google.cloud._testing import _Monkey - - environ = {} - with _Monkey(os, getenv=environ.get): - result = self._call_fut() - - self.assertIsNone(result) - - -class Test__get_nix_config_path(unittest.TestCase): - - def _call_fut(self): - from google.cloud._helpers import _get_nix_config_path - return _get_nix_config_path() - - def test_it(self): - from google.cloud import _helpers as MUT - from google.cloud._testing import _Monkey - - user_root = 'a' - config_file = 'b' - with _Monkey(MUT, _USER_ROOT=user_root, - 
_GCLOUD_CONFIG_FILE=config_file): - result = self._call_fut() - - expected = os.path.join(user_root, '.config', config_file) - self.assertEqual(result, expected) - - -class Test__get_windows_config_path(unittest.TestCase): - - def _call_fut(self): - from google.cloud._helpers import _get_windows_config_path - return _get_windows_config_path() - - def test_it(self): - from google.cloud import _helpers as MUT - from google.cloud._testing import _Monkey - - appdata_dir = 'a' - environ = {'APPDATA': appdata_dir} - config_file = 'b' - with _Monkey(os, getenv=environ.get): - with _Monkey(MUT, _GCLOUD_CONFIG_FILE=config_file): - result = self._call_fut() - - expected = os.path.join(appdata_dir, config_file) - self.assertEqual(result, expected) - - -class Test__default_service_project_id(unittest.TestCase): - - CONFIG_TEMPLATE = '[%s]\n%s = %s\n' - - def _call_fut(self): - from google.cloud._helpers import _default_service_project_id - return _default_service_project_id() - - def test_nix(self): - from google.cloud import _helpers as MUT - from google.cloud._testing import _Monkey - from google.cloud._testing import _NamedTemporaryFile - - project_id = 'test-project-id' - with _NamedTemporaryFile() as temp: - config_value = self.CONFIG_TEMPLATE % ( - MUT._GCLOUD_CONFIG_SECTION, - MUT._GCLOUD_CONFIG_KEY, project_id) - with open(temp.name, 'w') as config_file: - config_file.write(config_value) - - def mock_get_path(): - return temp.name - - with _Monkey(os, name='not-nt'): - with _Monkey(MUT, _get_nix_config_path=mock_get_path, - _USER_ROOT='not-None'): - result = self._call_fut() - - self.assertEqual(result, project_id) - - def test_nix_missing_prject_key(self): - from google.cloud import _helpers as MUT - from google.cloud._testing import _Monkey - from google.cloud._testing import _NamedTemporaryFile - - with _NamedTemporaryFile() as temp: - config_value = '[%s]' % (MUT._GCLOUD_CONFIG_SECTION,) - with open(temp.name, 'w') as config_file: - config_file.write(config_value) - - def mock_get_path(): - return temp.name - - with _Monkey(os, name='not-nt'): - with _Monkey(MUT, _get_nix_config_path=mock_get_path, - _USER_ROOT='not-None'): - result = self._call_fut() - - self.assertEqual(result, None) - - def test_windows(self): - from google.cloud import _helpers as MUT - from google.cloud._testing import _Monkey - from google.cloud._testing import _NamedTemporaryFile - - project_id = 'test-project-id' - with _NamedTemporaryFile() as temp: - config_value = self.CONFIG_TEMPLATE % ( - MUT._GCLOUD_CONFIG_SECTION, - MUT._GCLOUD_CONFIG_KEY, project_id) - with open(temp.name, 'w') as config_file: - config_file.write(config_value) - - def mock_get_path(): - return temp.name - - with _Monkey(os, name='nt'): - with _Monkey(MUT, _get_windows_config_path=mock_get_path, - _USER_ROOT=None): - result = self._call_fut() - - self.assertEqual(result, project_id) - - def test_gae(self): - from google.cloud import _helpers as MUT - from google.cloud._testing import _Monkey - - with _Monkey(os, name='not-nt'): - with _Monkey(MUT, _USER_ROOT=None): - result = self._call_fut() - - self.assertIsNone(result) - - -class Test__compute_engine_id(unittest.TestCase): - - def _call_fut(self): - from google.cloud._helpers import _compute_engine_id - return _compute_engine_id() - - def _monkeyConnection(self, connection): - from six.moves import http_client - from google.cloud._testing import _Monkey - - def _connection_factory(host, timeout): - connection.host = host - connection.timeout = timeout - return connection - - return 
_Monkey(http_client, HTTPConnection=_connection_factory) - - def test_bad_status(self): - connection = _HTTPConnection(404, None) - with self._monkeyConnection(connection): - dataset_id = self._call_fut() - self.assertIsNone(dataset_id) - - def test_success(self): - COMPUTE_ENGINE_ID = object() - connection = _HTTPConnection(200, COMPUTE_ENGINE_ID) - with self._monkeyConnection(connection): - dataset_id = self._call_fut() - self.assertEqual(dataset_id, COMPUTE_ENGINE_ID) - - def test_socket_raises(self): - connection = _TimeoutHTTPConnection() - with self._monkeyConnection(connection): - dataset_id = self._call_fut() - self.assertIsNone(dataset_id) - - -class Test__get_production_project(unittest.TestCase): - - def _call_fut(self): - from google.cloud._helpers import _get_production_project - return _get_production_project() - - def test_no_value(self): - from google.cloud._testing import _Monkey - - environ = {} - with _Monkey(os, getenv=environ.get): - project = self._call_fut() - self.assertIsNone(project) - - def test_value_set(self): - from google.cloud._testing import _Monkey - from google.cloud._helpers import PROJECT - - MOCK_PROJECT = object() - environ = {PROJECT: MOCK_PROJECT} - with _Monkey(os, getenv=environ.get): - project = self._call_fut() - self.assertEqual(project, MOCK_PROJECT) - - class Test__determine_default_project(unittest.TestCase): def _call_fut(self, project=None): from google.cloud._helpers import _determine_default_project return _determine_default_project(project=project) - def _determine_default_helper(self, prod=None, gae=None, gce=None, - file_id=None, srv_id=None, project=None): - from google.cloud._testing import _Monkey - from google.cloud import _helpers - - _callers = [] - - def prod_mock(): - _callers.append('prod_mock') - return prod - - def file_id_mock(): - _callers.append('file_id_mock') - return file_id - - def srv_id_mock(): - _callers.append('srv_id_mock') - return srv_id - - def gae_mock(): - _callers.append('gae_mock') - return gae - - def gce_mock(): - _callers.append('gce_mock') - return gce - - patched_methods = { - '_get_production_project': prod_mock, - '_file_project_id': file_id_mock, - '_default_service_project_id': srv_id_mock, - '_app_engine_id': gae_mock, - '_compute_engine_id': gce_mock, - } - - with _Monkey(_helpers, **patched_methods): - returned_project = self._call_fut(project) - - return returned_project, _callers + def test_it(self): + with mock.patch('google.auth.default', autospec=True) as default: + default.return_value = ( + mock.sentinel.credentials, mock.sentinel.project) + project = self._call_fut() - def test_no_value(self): - project, callers = self._determine_default_helper() - self.assertIsNone(project) - self.assertEqual(callers, ['prod_mock', 'file_id_mock', 'srv_id_mock', - 'gae_mock', 'gce_mock']) + self.assertEqual(project, mock.sentinel.project) + default.assert_called_once_with() def test_explicit(self): - PROJECT = object() - project, callers = self._determine_default_helper(project=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, []) - - def test_prod(self): - PROJECT = object() - project, callers = self._determine_default_helper(prod=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, ['prod_mock']) - - def test_gae(self): - PROJECT = object() - project, callers = self._determine_default_helper(gae=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, ['prod_mock', 'file_id_mock', - 'srv_id_mock', 'gae_mock']) - - def test_gce(self): - 
PROJECT = object() - project, callers = self._determine_default_helper(gce=PROJECT) - self.assertEqual(project, PROJECT) - self.assertEqual(callers, ['prod_mock', 'file_id_mock', 'srv_id_mock', - 'gae_mock', 'gce_mock']) + with mock.patch('google.auth.default', autospec=True) as default: + project = self._call_fut(mock.sentinel.project) + + self.assertEqual(project, mock.sentinel.project) + self.assertFalse(default.called) class Test__millis(unittest.TestCase): @@ -905,39 +604,6 @@ def test_w_project_passed_as_none(self): self.assertEqual(name, self.THING_NAME) -class TestMetadataPlugin(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud._helpers import MetadataPlugin - return MetadataPlugin - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - credentials = object() - plugin = self._make_one(credentials) - self.assertIs(plugin._credentials, credentials) - - def test___call__(self): - access_token_expected = 'FOOBARBAZ' - credentials = _Credentials(access_token=access_token_expected) - callback_args = [] - - def callback(*args): - callback_args.append(args) - - transformer = self._make_one(credentials) - result = transformer(None, callback) - cb_headers = [ - ('authorization', 'Bearer ' + access_token_expected), - ] - self.assertIsNone(result) - self.assertEqual(callback_args, [(cb_headers, None)]) - self.assertEqual(len(credentials._tokens), 1) - - class Test_make_secure_channel(unittest.TestCase): def _call_fut(self, *args, **kwargs): @@ -946,7 +612,6 @@ def _call_fut(self, *args, **kwargs): def test_it(self): from six.moves import http_client - from google.cloud._testing import _Monkey from google.cloud import _helpers as MUT SSL_CREDS = object() @@ -979,25 +644,23 @@ def secure_channel(self, *args, **kwargs): return CHANNEL grpc_mod = _GRPCModule() - metadata_plugin = object() - plugin_args = [] - - def mock_plugin(*args): - plugin_args.append(args) - return metadata_plugin host = 'HOST' credentials = object() user_agent = 'USER_AGENT' - with _Monkey(MUT, grpc=grpc_mod, - MetadataPlugin=mock_plugin): + + grpc_patch = mock.patch.object(MUT, 'grpc', new=grpc_mod) + request_patch = mock.patch('google_auth_httplib2.Request') + plugin_patch = mock.patch.object( + MUT, 'AuthMetadataPlugin', create=True) + with grpc_patch, request_patch as request_mock, plugin_patch as plugin: result = self._call_fut(credentials, user_agent, host) self.assertIs(result, CHANNEL) - self.assertEqual(plugin_args, [(credentials,)]) + plugin.assert_called_once_with(credentials, request_mock.return_value) self.assertEqual(grpc_mod.ssl_channel_credentials_args, ()) self.assertEqual(grpc_mod.metadata_call_credentials_args, - ((metadata_plugin,), {'name': 'google_creds'})) + ((plugin.return_value,), {'name': 'google_creds'})) self.assertEqual( grpc_mod.composite_channel_credentials_args, (SSL_CREDS, METADATA_CREDS)) @@ -1088,71 +751,3 @@ def test_with_port_argument(self): def test_without_port_argument(self): host = 'HOST:1114' self._helper(host, host) - - -class _AppIdentity(object): - - def __init__(self, app_id): - self.app_id = app_id - - def get_application_id(self): - return self.app_id - - -class _HTTPResponse(object): - - def __init__(self, status, data): - self.status = status - self.data = data - - def read(self): - return self.data - - -class _BaseHTTPConnection(object): - - host = timeout = None - - def __init__(self): - self._close_count = 0 - self._called_args = [] - self._called_kwargs = [] - - def 
request(self, method, uri, **kwargs): - self._called_args.append((method, uri)) - self._called_kwargs.append(kwargs) - - def close(self): - self._close_count += 1 - - -class _HTTPConnection(_BaseHTTPConnection): - - def __init__(self, status, project): - super(_HTTPConnection, self).__init__() - self.status = status - self.project = project - - def getresponse(self): - return _HTTPResponse(self.status, self.project) - - -class _TimeoutHTTPConnection(_BaseHTTPConnection): - - def getresponse(self): - import socket - raise socket.timeout('timed out') - - -class _Credentials(object): - - def __init__(self, access_token=None): - self._access_token = access_token - self._tokens = [] - - def get_access_token(self): - from oauth2client.client import AccessTokenInfo - token = AccessTokenInfo(access_token=self._access_token, - expires_in=None) - self._tokens.append(token) - return token diff --git a/packages/google-cloud-core/unit_tests/test__http.py b/packages/google-cloud-core/unit_tests/test__http.py index 72d79a707aac..b27f0240d82a 100644 --- a/packages/google-cloud-core/unit_tests/test__http.py +++ b/packages/google-cloud-core/unit_tests/test__http.py @@ -14,6 +14,8 @@ import unittest +import mock + class TestConnection(unittest.TestCase): @@ -31,11 +33,14 @@ def test_ctor_defaults(self): self.assertIsNone(conn.credentials) def test_ctor_explicit(self): - credentials = _Credentials() - self.assertEqual(credentials._create_scoped_calls, 0) + import google.auth.credentials + + credentials = mock.Mock(spec=google.auth.credentials.Scoped) + conn = self._make_one(credentials) - self.assertEqual(credentials._create_scoped_calls, 1) - self.assertIs(conn.credentials, credentials) + + credentials.with_scopes.assert_called_once_with(conn.SCOPE) + self.assertIs(conn.credentials, credentials.with_scopes.return_value) self.assertIsNone(conn._http) def test_ctor_explicit_http(self): @@ -61,13 +66,15 @@ def test_http_wo_creds(self): self.assertIsInstance(conn.http, httplib2.Http) def test_http_w_creds(self): - import httplib2 + import google.auth.credentials + import google_auth_httplib2 + + credentials = mock.Mock(spec=google.auth.credentials.Credentials) - authorized = object() - credentials = _Credentials(authorized) conn = self._make_one(credentials) - self.assertIs(conn.http, authorized) - self.assertIsInstance(credentials._called_with, httplib2.Http) + + self.assertIsInstance(conn.http, google_auth_httplib2.AuthorizedHttp) + self.assertIs(conn.http.credentials, credentials) def test_user_agent_format(self): from pkg_resources import get_distribution @@ -76,37 +83,6 @@ def test_user_agent_format(self): conn = self._make_one() self.assertEqual(conn.USER_AGENT, expected_ua) - def test__create_scoped_credentials_with_scoped_credentials(self): - klass = self._get_target_class() - scoped_creds = object() - scope = 'google-specific-scope' - credentials = _Credentials(scoped=scoped_creds) - - result = klass._create_scoped_credentials(credentials, scope) - self.assertIs(result, scoped_creds) - self.assertEqual(credentials._create_scoped_calls, 1) - self.assertEqual(credentials._scopes, [scope]) - - def test__create_scoped_credentials_without_scope_required(self): - klass = self._get_target_class() - credentials = _Credentials() - - result = klass._create_scoped_credentials(credentials, None) - self.assertIs(result, credentials) - self.assertEqual(credentials._create_scoped_calls, 1) - self.assertEqual(credentials._scopes, []) - - def test__create_scoped_credentials_non_scoped_credentials(self): - klass = 
self._get_target_class() - credentials = object() - result = klass._create_scoped_credentials(credentials, None) - self.assertIs(result, credentials) - - def test__create_scoped_credentials_no_credentials(self): - klass = self._get_target_class() - result = klass._create_scoped_credentials(None, None) - self.assertIsNone(result) - class TestJSONConnection(unittest.TestCase): @@ -137,9 +113,8 @@ def test_ctor_defaults(self): self.assertIsNone(conn.credentials) def test_ctor_explicit(self): - credentials = _Credentials() - conn = self._make_one(credentials) - self.assertIs(conn.credentials, credentials) + conn = self._make_one(mock.sentinel.credentials) + self.assertIs(conn.credentials, mock.sentinel.credentials) def test_http_w_existing(self): conn = self._make_one() @@ -152,13 +127,15 @@ def test_http_wo_creds(self): self.assertIsInstance(conn.http, httplib2.Http) def test_http_w_creds(self): - import httplib2 + import google.auth.credentials + import google_auth_httplib2 + + credentials = mock.Mock(spec=google.auth.credentials.Credentials) - authorized = object() - credentials = _Credentials(authorized) conn = self._make_one(credentials) - self.assertIs(conn.http, authorized) - self.assertIsInstance(credentials._called_with, httplib2.Http) + + self.assertIsInstance(conn.http, google_auth_httplib2.AuthorizedHttp) + self.assertIs(conn.http.credentials, credentials) def test_build_api_url_no_extra_query_params(self): conn = self._makeMockOne() @@ -437,25 +414,3 @@ def __init__(self, headers, content): def request(self, **kw): self._called_with = kw return self._response, self._content - - -class _Credentials(object): - - def __init__(self, authorized=None, scoped=None): - self._authorized = authorized - self._scoped = scoped - self._scoped_required = scoped is not None - self._create_scoped_calls = 0 - self._scopes = [] - - def authorize(self, http): - self._called_with = http - return self._authorized - - def create_scoped_required(self): - self._create_scoped_calls += 1 - return self._scoped_required - - def create_scoped(self, scope): - self._scopes.append(scope) - return self._scoped diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index e7fe5c03be12..975dcc710b27 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -14,6 +14,8 @@ import unittest +import mock + class Test_ClientFactoryMixin(unittest.TestCase): @@ -74,45 +76,26 @@ def test_ctor_explicit(self): self.assertIs(client_obj._connection.http, HTTP) def test_from_service_account_json(self): - from google.cloud._testing import _Monkey - from google.cloud import client - KLASS = self._get_target_class() - MOCK_FILENAME = 'foo.path' - mock_creds = _MockServiceAccountCredentials() - with _Monkey(client, ServiceAccountCredentials=mock_creds): - client_obj = KLASS.from_service_account_json(MOCK_FILENAME) - self.assertIs(client_obj._connection.credentials, mock_creds._result) - self.assertEqual(mock_creds.json_called, [MOCK_FILENAME]) + constructor_patch = mock.patch( + 'google.oauth2.service_account.Credentials.' 
+ 'from_service_account_file') - def test_from_service_account_json_fail(self): - KLASS = self._get_target_class() - CREDENTIALS = object() - self.assertRaises(TypeError, KLASS.from_service_account_json, None, - credentials=CREDENTIALS) + with constructor_patch as constructor: + client_obj = KLASS.from_service_account_json( + mock.sentinel.filename) - def test_from_service_account_p12(self): - from google.cloud._testing import _Monkey - from google.cloud import client + self.assertIs( + client_obj._connection.credentials, constructor.return_value) + constructor.assert_called_once_with(mock.sentinel.filename) + def test_from_service_account_json_bad_args(self): KLASS = self._get_target_class() - CLIENT_EMAIL = 'phred@example.com' - MOCK_FILENAME = 'foo.path' - mock_creds = _MockServiceAccountCredentials() - with _Monkey(client, ServiceAccountCredentials=mock_creds): - client_obj = KLASS.from_service_account_p12(CLIENT_EMAIL, - MOCK_FILENAME) - - self.assertIs(client_obj._connection.credentials, mock_creds._result) - self.assertEqual(mock_creds.p12_called, - [(CLIENT_EMAIL, MOCK_FILENAME)]) - - def test_from_service_account_p12_fail(self): - KLASS = self._get_target_class() - CREDENTIALS = object() - self.assertRaises(TypeError, KLASS.from_service_account_p12, None, - None, credentials=CREDENTIALS) + + with self.assertRaises(TypeError): + KLASS.from_service_account_json( + mock.sentinel.filename, credentials=mock.sentinel.credentials) class TestJSONClient(unittest.TestCase): @@ -214,19 +197,3 @@ class _MockConnection(object): def __init__(self, credentials=None, http=None): self.credentials = credentials self.http = http - - -class _MockServiceAccountCredentials(object): - - def __init__(self): - self.p12_called = [] - self.json_called = [] - self._result = object() - - def from_p12_keyfile(self, email, path): - self.p12_called.append((email, path)) - return self._result - - def from_json_keyfile_name(self, path): - self.json_called.append(path) - return self._result diff --git a/packages/google-cloud-core/unit_tests/test_credentials.py b/packages/google-cloud-core/unit_tests/test_credentials.py index ef583b35d98a..6489dd19c4dd 100644 --- a/packages/google-cloud-core/unit_tests/test_credentials.py +++ b/packages/google-cloud-core/unit_tests/test_credentials.py @@ -14,6 +14,8 @@ import unittest +import mock + class Test_get_credentials(unittest.TestCase): @@ -22,15 +24,13 @@ def _call_fut(self): return credentials.get_credentials() def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - client = _Client() - with _Monkey(MUT, client=client): + with mock.patch('google.auth.default', autospec=True) as default: + default.return_value = ( + mock.sentinel.credentials, mock.sentinel.project) found = self._call_fut() - self.assertIsInstance(found, _Credentials) - self.assertIs(found, client._signed) - self.assertTrue(client._get_app_default_called) + + self.assertIs(found, mock.sentinel.credentials) + default.assert_called_once_with() class Test_generate_signed_url(unittest.TestCase): @@ -44,18 +44,20 @@ def _generate_helper(self, response_type=None, response_disposition=None, import base64 from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit + import google.auth.credentials from google.cloud._testing import _Monkey from google.cloud import credentials as MUT ENDPOINT = 'http://api.example.com' RESOURCE = '/name/path' SIGNED = base64.b64encode(b'DEADBEEF') - CREDENTIALS = _Credentials() + CREDENTIALS = 
mock.Mock(spec=google.auth.credentials.Signing) + CREDENTIALS.signer_email = 'service@example.com' def _get_signed_query_params(*args): credentials, expiration = args[:2] return { - 'GoogleAccessId': credentials.service_account_email, + 'GoogleAccessId': credentials.signer_email, 'Expires': str(expiration), 'Signature': SIGNED, } @@ -76,7 +78,7 @@ def _get_signed_query_params(*args): self.assertEqual(params.pop('Signature'), [SIGNED.decode('ascii')]) self.assertEqual(params.pop('Expires'), ['1000']) self.assertEqual(params.pop('GoogleAccessId'), - [CREDENTIALS.service_account_email]) + [CREDENTIALS.signer_email]) if response_type is not None: self.assertEqual(params.pop('response-content-type'), [response_type]) @@ -104,10 +106,11 @@ def test_w_custom_fields(self): class Test_generate_signed_url_exception(unittest.TestCase): def test_with_google_credentials(self): import time + import google.auth.credentials from google.cloud.credentials import generate_signed_url RESOURCE = '/name/path' - credentials = _GoogleCredentials() + credentials = mock.Mock(spec=google.auth.credentials.Credentials) expiration = int(time.time() + 5) self.assertRaises(AttributeError, generate_signed_url, credentials, resource=RESOURCE, expiration=expiration) @@ -122,11 +125,13 @@ def _call_fut(self, credentials, expiration, string_to_sign): def test_it(self): import base64 + import google.auth.credentials SIG_BYTES = b'DEADBEEF' - ACCOUNT_NAME = object() - CREDENTIALS = _Credentials(sign_result=SIG_BYTES, - service_account_email=ACCOUNT_NAME) + ACCOUNT_NAME = mock.sentinel.service_account_email + CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) + CREDENTIALS.signer_email = ACCOUNT_NAME + CREDENTIALS.sign_bytes.return_value = SIG_BYTES EXPIRATION = 100 STRING_TO_SIGN = 'dummy_signature' result = self._call_fut(CREDENTIALS, EXPIRATION, @@ -137,7 +142,7 @@ def test_it(self): 'Expires': str(EXPIRATION), 'Signature': base64.b64encode(b'DEADBEEF'), }) - self.assertEqual(CREDENTIALS._signed, [STRING_TO_SIGN]) + CREDENTIALS.sign_bytes.assert_called_once_with(STRING_TO_SIGN) class Test__get_expiration_seconds(unittest.TestCase): @@ -221,36 +226,3 @@ def test_w_timedelta_days(self): result = self._call_fut(expiration_as_delta) self.assertEqual(result, utc_seconds + 86400) - - -class _Credentials(object): - - def __init__(self, service_account_email='testing@example.com', - sign_result=''): - self.service_account_email = service_account_email - self._sign_result = sign_result - self._signed = [] - - def sign_blob(self, bytes_to_sign): - self._signed.append(bytes_to_sign) - return None, self._sign_result - - -class _GoogleCredentials(object): - - def __init__(self, service_account_email='testing@example.com'): - self.service_account_email = service_account_email - - -class _Client(object): - - def __init__(self): - self._signed = _Credentials() - - class GoogleCredentials(object): - @staticmethod - def get_application_default(): - self._get_app_default_called = True - return self._signed - - self.GoogleCredentials = GoogleCredentials From fdb020ae52cd159b75a43d6b1d4b70fcf5dcdc44 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 5 Dec 2016 12:11:21 -0500 Subject: [PATCH 096/468] Add helper for parsing zoneless time strings. 
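
For illustration, the new helper is a thin wrapper around strptime with
the '%H:%M:%S' format, so a zoneless time string becomes a naive
datetime.time; the values below mirror the accompanying unit test:

    import datetime

    from google.cloud._helpers import _time_from_iso8601_time_naive

    when = _time_from_iso8601_time_naive('12:09:42')
    assert when == datetime.time(12, 9, 42)  # naive: no tzinfo attached
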
--- packages/google-cloud-core/google/cloud/_helpers.py | 13 +++++++++++++ .../google-cloud-core/unit_tests/test__helpers.py | 12 ++++++++++++ 2 files changed, 25 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 9b4ec5736cb0..50936915fa33 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -251,6 +251,19 @@ def _date_from_iso8601_date(value): return datetime.datetime.strptime(value, '%Y-%m-%d').date() +def _time_from_iso8601_time_naive(value): + """Convert a zoneless ISO8601 time string to naive datetime time + + :type value: str + :param value: The time string to convert + + :rtype: :class:`datetime.time` + :returns: A datetime time object created from the string + + """ + return datetime.datetime.strptime(value, '%H:%M:%S').time() + + def _rfc3339_to_datetime(dt_str): """Convert a microsecond-precision timetamp to a native datetime. diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 78391e56ef42..08c27ae556e2 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -265,6 +265,18 @@ def test_todays_date(self): self.assertEqual(self._call_fut(TODAY.strftime("%Y-%m-%d")), TODAY) +class Test___time_from_iso8601_time_naive(unittest.TestCase): + + def _call_fut(self, value): + from google.cloud._helpers import _time_from_iso8601_time_naive + return _time_from_iso8601_time_naive(value) + + def test_todays_date(self): + import datetime + WHEN = datetime.time(12, 9, 42) + self.assertEqual(self._call_fut(("12:09:42")), WHEN) + + class Test__rfc3339_to_datetime(unittest.TestCase): def _call_fut(self, dt_str): From cf495c5f4d9d46142c85468d4caae28708ecaeee Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 5 Dec 2016 12:56:51 -0800 Subject: [PATCH 097/468] Make make_secure_channel use google-auth (#2808) --- .../google/cloud/_helpers.py | 26 +++----- packages/google-cloud-core/tox.ini | 1 + .../unit_tests/test__helpers.py | 66 ++++--------------- 3 files changed, 23 insertions(+), 70 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 50936915fa33..f8d535b47099 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -32,11 +32,9 @@ try: import grpc - from google.auth.transport.grpc import ( - AuthMetadataPlugin) # pragma: NO COVER -except ImportError: + import google.auth.transport.grpc +except ImportError: # pragma: NO COVER grpc = None - AuthMetadataPlugin = None import httplib2 import six @@ -485,22 +483,16 @@ def make_secure_channel(credentials, user_agent, host): :rtype: :class:`grpc._channel.Channel` :returns: gRPC secure channel with credentials attached. """ - # ssl_channel_credentials() loads root certificates from - # `grpc/_adapter/credentials/roots.pem`. 
- transport_creds = grpc.ssl_channel_credentials() - http = httplib2.Http() - custom_metadata_plugin = AuthMetadataPlugin( - credentials, google_auth_httplib2.Request(http=http)) - auth_creds = grpc.metadata_call_credentials( - custom_metadata_plugin, name='google_creds') - channel_creds = grpc.composite_channel_credentials( - transport_creds, auth_creds) target = '%s:%d' % (host, http_client.HTTPS_PORT) - channel_args = ( + http_request = google_auth_httplib2.Request(http=httplib2.Http()) + options = ( ('grpc.primary_user_agent', user_agent), ) - return grpc.secure_channel(target, channel_creds, - options=channel_args) + return google.auth.transport.grpc.secure_authorized_channel( + credentials, + http_request, + target, + options=options) def make_secure_stub(credentials, user_agent, stub_class, host): diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini index 7c5ef9d29dde..3f9dc5ae4218 100644 --- a/packages/google-cloud-core/tox.ini +++ b/packages/google-cloud-core/tox.ini @@ -4,6 +4,7 @@ envlist = [testing] deps = + grpcio mock pytest covercmd = diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 08c27ae556e2..59752a6e594f 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -624,65 +624,25 @@ def _call_fut(self, *args, **kwargs): def test_it(self): from six.moves import http_client - from google.cloud import _helpers as MUT - - SSL_CREDS = object() - METADATA_CREDS = object() - COMPOSITE_CREDS = object() - CHANNEL = object() - - class _GRPCModule(object): - - def __init__(self): - self.ssl_channel_credentials_args = None - self.metadata_call_credentials_args = None - self.composite_channel_credentials_args = None - self.secure_channel_args = None - - def ssl_channel_credentials(self, *args): - self.ssl_channel_credentials_args = args - return SSL_CREDS - - def metadata_call_credentials(self, *args, **kwargs): - self.metadata_call_credentials_args = (args, kwargs) - return METADATA_CREDS - - def composite_channel_credentials(self, *args): - self.composite_channel_credentials_args = args - return COMPOSITE_CREDS - def secure_channel(self, *args, **kwargs): - self.secure_channel_args = (args, kwargs) - return CHANNEL - - grpc_mod = _GRPCModule() - - host = 'HOST' credentials = object() + host = 'HOST' user_agent = 'USER_AGENT' - grpc_patch = mock.patch.object(MUT, 'grpc', new=grpc_mod) - request_patch = mock.patch('google_auth_httplib2.Request') - plugin_patch = mock.patch.object( - MUT, 'AuthMetadataPlugin', create=True) - with grpc_patch, request_patch as request_mock, plugin_patch as plugin: + secure_authorized_channel_patch = mock.patch( + 'google.auth.transport.grpc.secure_authorized_channel', + autospec=True) + + with secure_authorized_channel_patch as secure_authorized_channel: result = self._call_fut(credentials, user_agent, host) - self.assertIs(result, CHANNEL) - plugin.assert_called_once_with(credentials, request_mock.return_value) - self.assertEqual(grpc_mod.ssl_channel_credentials_args, ()) - self.assertEqual(grpc_mod.metadata_call_credentials_args, - ((plugin.return_value,), {'name': 'google_creds'})) - self.assertEqual( - grpc_mod.composite_channel_credentials_args, - (SSL_CREDS, METADATA_CREDS)) - target = '%s:%d' % (host, http_client.HTTPS_PORT) - secure_args = (target, COMPOSITE_CREDS) - secure_kwargs = { - 'options': (('grpc.primary_user_agent', user_agent),) - } - 
self.assertEqual(grpc_mod.secure_channel_args, - (secure_args, secure_kwargs)) + self.assertIs(result, secure_authorized_channel.return_value) + + expected_target = '%s:%d' % (host, http_client.HTTPS_PORT) + expected_options = (('grpc.primary_user_agent', user_agent),) + + secure_authorized_channel.assert_called_once_with( + credentials, mock.ANY, expected_target, options=expected_options) class Test_make_secure_stub(unittest.TestCase): From 9f2a8abd82c823d342a922061181089f01e89ff1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 5 Dec 2016 13:50:25 -0800 Subject: [PATCH 098/468] Cutting new releases for BigQuery, Language and Core. --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 8e7085d27904..550e2069f305 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -60,7 +60,7 @@ setup( name='google-cloud-core', - version='0.21.0', + version='0.22.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 5f343c8da3741bf06b7ac114f50e534dd6a35cb8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 7 Dec 2016 16:00:24 -0800 Subject: [PATCH 099/468] Raise ValueError if credentials are not from google-auth (#2828) --- .../google-cloud-core/google/cloud/client.py | 12 ++++++++++ .../unit_tests/test_client.py | 24 ++++++++++++++----- 2 files changed, 30 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index a63c614888ef..ab413f91a652 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -14,6 +14,7 @@ """Base classes for client used to interact with Google Cloud APIs.""" +import google.auth.credentials from google.oauth2 import service_account import six @@ -22,6 +23,13 @@ from google.cloud.credentials import get_credentials +_GOOGLE_AUTH_CREDENTIALS_HELP = ( + 'This library only supports credentials from google-auth-library-python. ' + 'See https://google-cloud-python.readthedocs.io/en/latest/' + 'google-cloud-auth.html for help on authentication with this library.' +) + + class _ClientFactoryMixin(object): """Mixin to allow factories that create credentials. 
@@ -83,6 +91,10 @@ class Client(_ClientFactoryMixin): _connection_class = Connection def __init__(self, credentials=None, http=None): + if (credentials is not None and + not isinstance( + credentials, google.auth.credentials.Credentials)): + raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and http is None: credentials = get_credentials() self._connection = self._connection_class( diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index 975dcc710b27..21d036c06ad0 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -17,6 +17,11 @@ import mock +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + + class Test_ClientFactoryMixin(unittest.TestCase): @staticmethod @@ -52,7 +57,7 @@ def test_ctor_defaults(self): from google.cloud._testing import _Monkey from google.cloud import client - CREDENTIALS = object() + CREDENTIALS = _make_credentials() FUNC_CALLS = [] def mock_get_credentials(): @@ -67,7 +72,7 @@ def mock_get_credentials(): self.assertEqual(FUNC_CALLS, ['get_credentials']) def test_ctor_explicit(self): - CREDENTIALS = object() + CREDENTIALS = _make_credentials() HTTP = object() client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) @@ -75,12 +80,19 @@ def test_ctor_explicit(self): self.assertIs(client_obj._connection.credentials, CREDENTIALS) self.assertIs(client_obj._connection.http, HTTP) + def test_ctor_bad_credentials(self): + CREDENTIALS = object() + + with self.assertRaises(ValueError): + self._make_one(credentials=CREDENTIALS) + def test_from_service_account_json(self): KLASS = self._get_target_class() constructor_patch = mock.patch( 'google.oauth2.service_account.Credentials.' - 'from_service_account_file') + 'from_service_account_file', + return_value=_make_credentials()) with constructor_patch as constructor: client_obj = KLASS.from_service_account_json( @@ -122,7 +134,7 @@ def test_ctor_defaults(self): from google.cloud import client PROJECT = 'PROJECT' - CREDENTIALS = object() + CREDENTIALS = _make_credentials() FUNC_CALLS = [] def mock_determine_proj(project): @@ -160,7 +172,7 @@ def mock_determine_proj(project): self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) def test_ctor_w_invalid_project(self): - CREDENTIALS = object() + CREDENTIALS = _make_credentials() HTTP = object() with self.assertRaises(ValueError): self._make_one(project=object(), credentials=CREDENTIALS, @@ -169,7 +181,7 @@ def test_ctor_w_invalid_project(self): def _explicit_ctor_helper(self, project): import six - CREDENTIALS = object() + CREDENTIALS = _make_credentials() HTTP = object() client_obj = self._make_one(project=project, credentials=CREDENTIALS, From e9296841e445566d69580ebc3a0676b17d9c67cf Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 9 Dec 2016 08:38:08 -0500 Subject: [PATCH 100/468] Promote local tempdir context manager to 'g.c._testing'. 
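
For illustration, test modules can now reuse the shared context manager
instead of keeping a private copy; a sketch of the pattern used in the
streaming transfer tests (the filename is only an example):

    import os

    from google.cloud._testing import _tempdir

    with _tempdir() as temp_dir:
        filename = os.path.join(temp_dir, 'file.out')
        # write fixture data and exercise the code under test here
    # the directory and everything in it is removed on exit
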
--- .../google/cloud/_testing.py | 18 +++++++++++++++ .../unit_tests/streaming/test_transfer.py | 23 ++++--------------- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index 880536f8aa45..e862ce00dfc4 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -54,6 +54,24 @@ def __exit__(self, exc_type, exc_val, exc_tb): os.remove(self.name) +def _tempdir_maker(): + import contextlib + import shutil + import tempfile + + @contextlib.contextmanager + def _tempdir_mgr(): + temp_dir = tempfile.mkdtemp() + yield temp_dir + shutil.rmtree(temp_dir) + + return _tempdir_mgr + + +_tempdir = _tempdir_maker() +del _tempdir_maker + + class _GAXBaseAPI(object): _random_gax_error = False diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index d3074f728e24..aa4fd2e546ad 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -196,6 +196,7 @@ def test_ctor_w_total_size(self): def test_from_file_w_existing_file_no_override(self): import os + from google.cloud._testing import _tempdir klass = self._get_target_class() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') @@ -206,6 +207,7 @@ def test_from_file_w_existing_file_no_override(self): def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): import os + from google.cloud._testing import _tempdir klass = self._get_target_class() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') @@ -837,6 +839,7 @@ def test_from_file_w_nonesuch_file(self): def test_from_file_wo_mimetype_w_unguessable_filename(self): import os + from google.cloud._testing import _tempdir klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE' with _tempdir() as tempdir: @@ -848,6 +851,7 @@ def test_from_file_wo_mimetype_w_unguessable_filename(self): def test_from_file_wo_mimetype_w_guessable_filename(self): import os + from google.cloud._testing import _tempdir klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' with _tempdir() as tempdir: @@ -862,6 +866,7 @@ def test_from_file_wo_mimetype_w_guessable_filename(self): def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): import os + from google.cloud._testing import _tempdir klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' CHUNK_SIZE = 3 @@ -1940,21 +1945,3 @@ def __call__(self, request, end): assert self._called_with is None self._called_with = (request, end) return self._response - - -def _tempdir_maker(): - import contextlib - import shutil - import tempfile - - @contextlib.contextmanager - def _tempdir_mgr(): - temp_dir = tempfile.mkdtemp() - yield temp_dir - shutil.rmtree(temp_dir) - - return _tempdir_mgr - - -_tempdir = _tempdir_maker() -del _tempdir_maker From f9ee419a8e84c7059ded959cfa0af67270c8992a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 8 Dec 2016 15:17:03 -0800 Subject: [PATCH 101/468] Update versions for mega-release. We want to update - `google-cloud-bigquery` - `google-cloud-datastore` - `google-cloud-logging` - `google-cloud-storage` - `google-cloud-core` And then update `google-cloud` to re-wrap the latest versions of each. 
However, to avoid having packages in `google-cloud` with conflicting versions of `google-cloud-core`, we must release all packages. --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 550e2069f305..4e19fcd13678 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -60,7 +60,7 @@ setup( name='google-cloud-core', - version='0.22.0', + version='0.22.1', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From cdd1ef739b81351e3834f1dfef0c23b26e2ff53a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 9 Dec 2016 16:57:17 -0800 Subject: [PATCH 102/468] Explicitly putting 1.0.2 lower bound on grpcio. Also upgrading logging from 0.14.x to 0.90.x --- packages/google-cloud-core/tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini index 3f9dc5ae4218..48e8a517f057 100644 --- a/packages/google-cloud-core/tox.ini +++ b/packages/google-cloud-core/tox.ini @@ -4,7 +4,7 @@ envlist = [testing] deps = - grpcio + grpcio >= 1.0.2rc0 mock pytest covercmd = From c9e198f2e270855fa3029de55a3c6d4ff9854a0a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 14 Dec 2016 23:05:25 -0800 Subject: [PATCH 103/468] Removing _connection_class from base client. --- .../google-cloud-core/google/cloud/client.py | 45 +++++++++--------- packages/google-cloud-core/tox.ini | 2 +- .../unit_tests/test_client.py | 46 ++++--------------- 3 files changed, 33 insertions(+), 60 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index ab413f91a652..338642b263dc 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -19,7 +19,6 @@ import six from google.cloud._helpers import _determine_default_project -from google.cloud._http import Connection from google.cloud.credentials import get_credentials @@ -72,24 +71,23 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): class Client(_ClientFactoryMixin): """Client to bundle configuration needed for API requests. - Assumes that the associated ``_connection_class`` only accepts - ``http`` and ``credentials`` in its constructor. + Stores ``credentials`` and ``http`` object so that subclasses + can pass them along to a connection class. - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. 
If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ - _connection_class = Connection - def __init__(self, credentials=None, http=None): if (credentials is not None and not isinstance( @@ -97,8 +95,8 @@ def __init__(self, credentials=None, http=None): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and http is None: credentials = get_credentials() - self._connection = self._connection_class( - credentials=credentials, http=http) + self._credentials = credentials + self._http = http class _ClientProjectMixin(object): @@ -142,15 +140,16 @@ class JSONClient(Client, _ClientProjectMixin): passed falls back to the default inferred from the environment. - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini index 48e8a517f057..156ffc07e00e 100644 --- a/packages/google-cloud-core/tox.ini +++ b/packages/google-cloud-core/tox.ini @@ -4,7 +4,7 @@ envlist = [testing] deps = - grpcio >= 1.0.2rc0 + grpcio >= 1.0.2 mock pytest covercmd = diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index 21d036c06ad0..dd1075aae5f8 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -36,15 +36,6 @@ def test_virtual(self): class TestClient(unittest.TestCase): - def setUp(self): - KLASS = self._get_target_class() - self.original_cnxn_class = KLASS._connection_class - KLASS._connection_class = _MockConnection - - def tearDown(self): - KLASS = self._get_target_class() - KLASS._connection_class = self.original_cnxn_class - @staticmethod def _get_target_class(): from google.cloud.client import Client @@ -67,8 +58,8 @@ def mock_get_credentials(): with _Monkey(client, get_credentials=mock_get_credentials): client_obj = self._make_one() - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIsNone(client_obj._http) self.assertEqual(FUNC_CALLS, ['get_credentials']) def test_ctor_explicit(self): @@ -76,9 +67,8 @@ def test_ctor_explicit(self): HTTP = object() client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) - self.assertIs(client_obj._connection.http, HTTP) + self.assertIs(client_obj._credentials, 
CREDENTIALS) + self.assertIs(client_obj._http, HTTP) def test_ctor_bad_credentials(self): CREDENTIALS = object() @@ -99,7 +89,8 @@ def test_from_service_account_json(self): mock.sentinel.filename) self.assertIs( - client_obj._connection.credentials, constructor.return_value) + client_obj._credentials, constructor.return_value) + self.assertIsNone(client_obj._http) constructor.assert_called_once_with(mock.sentinel.filename) def test_from_service_account_json_bad_args(self): @@ -112,15 +103,6 @@ def test_from_service_account_json_bad_args(self): class TestJSONClient(unittest.TestCase): - def setUp(self): - KLASS = self._get_target_class() - self.original_cnxn_class = KLASS._connection_class - KLASS._connection_class = _MockConnection - - def tearDown(self): - KLASS = self._get_target_class() - KLASS._connection_class = self.original_cnxn_class - @staticmethod def _get_target_class(): from google.cloud.client import JSONClient @@ -150,8 +132,8 @@ def mock_get_credentials(): client_obj = self._make_one() self.assertEqual(client_obj.project, PROJECT) - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIsNone(client_obj._http) self.assertEqual( FUNC_CALLS, [(None, '_determine_default_project'), 'get_credentials']) @@ -191,9 +173,8 @@ def _explicit_ctor_helper(self, project): self.assertEqual(client_obj.project, project.decode('utf-8')) else: self.assertEqual(client_obj.project, project) - self.assertIsInstance(client_obj._connection, _MockConnection) - self.assertIs(client_obj._connection.credentials, CREDENTIALS) - self.assertIs(client_obj._connection.http, HTTP) + self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIs(client_obj._http, HTTP) def test_ctor_explicit_bytes(self): PROJECT = b'PROJECT' @@ -202,10 +183,3 @@ def test_ctor_explicit_bytes(self): def test_ctor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) - - -class _MockConnection(object): - - def __init__(self, credentials=None, http=None): - self.credentials = credentials - self.http = http From 6122d00e42632ec9231ff660bc4db1358c1a762e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 14 Dec 2016 23:43:05 -0800 Subject: [PATCH 104/468] Manually creating Client._connection in subclasses. --- packages/google-cloud-core/google/cloud/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 338642b263dc..7a14e03f763a 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -82,8 +82,8 @@ class Client(_ClientFactoryMixin): :type http: :class:`~httplib2.Http` :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. """ @@ -148,8 +148,8 @@ class JSONClient(Client, _ClientProjectMixin): :type http: :class:`~httplib2.Http` :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. 
If not passed, an + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. From a43d8e6be996c378bd8af0c1845267f76e29238d Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Fri, 23 Dec 2016 07:58:49 -0500 Subject: [PATCH 105/468] Add max_receive_message_length for larger rows. --- .../google/cloud/_helpers.py | 25 ++++++++++---- .../unit_tests/test__helpers.py | 33 +++++++++++++++++-- 2 files changed, 50 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index f8d535b47099..03f6d2fdb60c 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -465,7 +465,7 @@ def _name_from_project_path(path, project, template): return match.group('name') -def make_secure_channel(credentials, user_agent, host): +def make_secure_channel(credentials, user_agent, host, extra_options=None): """Makes a secure channel for an RPC service. Uses / depends on gRPC. @@ -480,14 +480,21 @@ def make_secure_channel(credentials, user_agent, host): :type host: str :param host: The host for the service. + :type extra_options: tuple + :param extra_options: (Optional) Extra gRPC options used when creating the + channel. + :rtype: :class:`grpc._channel.Channel` :returns: gRPC secure channel with credentials attached. """ target = '%s:%d' % (host, http_client.HTTPS_PORT) http_request = google_auth_httplib2.Request(http=httplib2.Http()) - options = ( - ('grpc.primary_user_agent', user_agent), - ) + + user_agent_option = ('grpc.primary_user_agent', user_agent) + if extra_options is not None: + options = (user_agent_option,) + extra_options + else: + options = (user_agent_option,) return google.auth.transport.grpc.secure_authorized_channel( credentials, http_request, @@ -495,7 +502,8 @@ def make_secure_channel(credentials, user_agent, host): options=options) -def make_secure_stub(credentials, user_agent, stub_class, host): +def make_secure_stub(credentials, user_agent, stub_class, host, + extra_options=None): """Makes a secure stub for an RPC service. Uses / depends on gRPC. @@ -513,10 +521,15 @@ def make_secure_stub(credentials, user_agent, stub_class, host): :type host: str :param host: The host for the service. + :type extra_options: tuple + :param extra_options: (Optional) Extra gRPC options passed when creating + the channel. + :rtype: object, instance of ``stub_class`` :returns: The stub object used to make gRPC requests to a given API. 
""" - channel = make_secure_channel(credentials, user_agent, host) + channel = make_secure_channel(credentials, user_agent, host, + extra_options=extra_options) return stub_class(channel) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index 59752a6e594f..bc1a4dafb6b7 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -644,6 +644,33 @@ def test_it(self): secure_authorized_channel.assert_called_once_with( credentials, mock.ANY, expected_target, options=expected_options) + def test_extra_options(self): + from six.moves import http_client + + credentials = object() + host = 'HOST' + user_agent = 'USER_AGENT' + extra_options = (('some', 'option'),) + + secure_authorized_channel_patch = mock.patch( + 'google.auth.transport.grpc.secure_authorized_channel', + autospec=True) + + with secure_authorized_channel_patch as secure_authorized_channel: + result = self._call_fut(credentials, user_agent, host, + extra_options) + + self.assertIs(result, secure_authorized_channel.return_value) + + expected_target = '%s:%d' % (host, http_client.HTTPS_PORT) + expected_options = ( + ('grpc.primary_user_agent', user_agent), + extra_options[0], + ) + + secure_authorized_channel.assert_called_once_with( + credentials, mock.ANY, expected_target, options=expected_options) + class Test_make_secure_stub(unittest.TestCase): @@ -664,13 +691,15 @@ def stub_class(channel): channels.append(channel) return result - def mock_channel(*args): + def mock_channel(*args, **kwargs): channel_args.append(args) + channel_args.append(kwargs) return channel_obj credentials = object() user_agent = 'you-sir-age-int' host = 'localhost' + extra_options = {'extra_options': None} with _Monkey(MUT, make_secure_channel=mock_channel): stub = self._call_fut(credentials, user_agent, stub_class, host) @@ -678,7 +707,7 @@ def mock_channel(*args): self.assertIs(stub, result) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, - [(credentials, user_agent, host)]) + [(credentials, user_agent, host), extra_options]) class Test_make_insecure_stub(unittest.TestCase): From 19ca1ceab113142f4d35489dd088eff70252ba3c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 17 Jan 2017 16:12:47 -0500 Subject: [PATCH 106/468] Testing support for creating gRPC errors w/ tralling metadata. --- packages/google-cloud-core/google/cloud/_testing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index e862ce00dfc4..f9d2b57fda52 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -79,12 +79,12 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - def _make_grpc_error(self, status_code): + def _make_grpc_error(self, status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous details = 'Some error details.' 
- exc_state = _RPCState((), None, None, status_code, details) + exc_state = _RPCState((), None, trailing, status_code, details) return GrpcRendezvous(exc_state, None, None, None) def _make_grpc_not_found(self): From 47ec6c8ba9b624d02dfcd16fdc3a98360035c93b Mon Sep 17 00:00:00 2001 From: Ben Demaree Date: Wed, 18 Jan 2017 09:42:44 -0600 Subject: [PATCH 107/468] Make note of num_results property on iterators --- packages/google-cloud-core/google/cloud/iterator.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 2d0a93689d32..07acc1048391 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -42,6 +42,14 @@ ... if not my_item.is_valid: ... break +At any point, you may check the number of items consumed by referencing the +``num_results`` property of the iterator:: + + >>> my_iterator = Iterator(...) + ... for my_item in my_iterator: + ... if my_iterator.num_results >= 10: + ... break + When iterating, not every new item will send a request to the server. To iterate based on each page of items (where a page corresponds to a request):: From e094c4bbfbdb69d36c128ba5e8db4a96f079dcca Mon Sep 17 00:00:00 2001 From: Ben Demaree Date: Wed, 18 Jan 2017 10:04:29 -0600 Subject: [PATCH 108/468] Proper REPL prompt --- packages/google-cloud-core/google/cloud/iterator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 07acc1048391..7bb708e90f09 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -46,7 +46,7 @@ ``num_results`` property of the iterator:: >>> my_iterator = Iterator(...) - ... for my_item in my_iterator: + >>> for my_item in my_iterator: ... if my_iterator.num_results >= 10: ... break From b434e42a4252c777a34a5026c58b630c28ceef2f Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 18 Jan 2017 13:14:03 -0500 Subject: [PATCH 109/468] Update import spacing part 2. 
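
The change adds a blank line between function-local imports and the
body of each test throughout these modules; a minimal sketch of the
resulting style (names are illustrative, not taken from the diff):

    import unittest


    class TestBufferedStream(unittest.TestCase):

        def test_ctor(self):
            from io import BytesIO

            stream = BytesIO(b'CONTENT GOES HERE')
            self.assertEqual(stream.read(), b'CONTENT GOES HERE')
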
--- .../streaming/test_buffered_stream.py | 9 ++ .../unit_tests/streaming/test_exceptions.py | 2 + .../unit_tests/streaming/test_http_wrapper.py | 13 +++ .../unit_tests/streaming/test_stream_slice.py | 7 ++ .../unit_tests/streaming/test_transfer.py | 88 +++++++++++++++++++ .../unit_tests/streaming/test_util.py | 4 + .../unit_tests/test__helpers.py | 24 +++++ .../unit_tests/test__http.py | 10 +++ .../unit_tests/test_client.py | 4 + .../unit_tests/test_credentials.py | 6 ++ .../unit_tests/test_exceptions.py | 7 ++ .../unit_tests/test_iterator.py | 5 ++ .../unit_tests/test_operation.py | 6 +- 13 files changed, 184 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py index 8a8793b0c49c..797ceea2d280 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py @@ -20,6 +20,7 @@ class Test_BufferedStream(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.buffered_stream import BufferedStream + return BufferedStream def _make_one(self, *args, **kw): @@ -41,6 +42,7 @@ class _Stream(object): def test_ctor_start_zero_longer_than_buffer(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 0 BUFSIZE = 4 @@ -56,6 +58,7 @@ def test_ctor_start_zero_longer_than_buffer(self): def test_ctor_start_nonzero_shorter_than_buffer(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 8 BUFSIZE = 10 @@ -72,6 +75,7 @@ def test_ctor_start_nonzero_shorter_than_buffer(self): def test__bytes_remaining_start_zero_longer_than_buffer(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 0 BUFSIZE = 4 @@ -81,6 +85,7 @@ def test__bytes_remaining_start_zero_longer_than_buffer(self): def test__bytes_remaining_start_zero_shorter_than_buffer(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 8 BUFSIZE = 10 @@ -91,6 +96,7 @@ def test__bytes_remaining_start_zero_shorter_than_buffer(self): def test_read_w_none(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 0 BUFSIZE = 4 @@ -101,6 +107,7 @@ def test_read_w_none(self): def test_read_w_negative_size(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 0 BUFSIZE = 4 @@ -111,6 +118,7 @@ def test_read_w_negative_size(self): def test_read_from_start(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = 0 BUFSIZE = 4 @@ -120,6 +128,7 @@ def test_read_from_start(self): def test_read_exhausted(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' START = len(CONTENT) BUFSIZE = 10 diff --git a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py index 0cb1c724bf99..b31c562c8e9d 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py @@ -20,6 +20,7 @@ class Test_HttpError(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.exceptions import HttpError + return HttpError def _make_one(self, *args, **kw): @@ -62,6 +63,7 @@ class Test_RetryAfterError(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.exceptions import RetryAfterError + return RetryAfterError def _make_one(self, *args, **kw): diff --git 
a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py index f05e1b0a6f9f..b0d3156ba42f 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py @@ -20,6 +20,7 @@ class Test__httplib2_debug_level(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.http_wrapper import _httplib2_debug_level + return _httplib2_debug_level def _make_one(self, *args, **kw): @@ -80,6 +81,7 @@ class Test_Request(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.http_wrapper import Request + return Request def _make_one(self, *args, **kw): @@ -95,6 +97,7 @@ def test_ctor_defaults(self): def test_loggable_body_setter_w_body_None(self): from google.cloud.streaming.exceptions import RequestError + request = self._make_one(body=None) with self.assertRaises(RequestError): request.loggable_body = 'abc' @@ -121,6 +124,7 @@ class Test_Response(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.http_wrapper import Response + return Response def _make_one(self, *args, **kw): @@ -217,10 +221,12 @@ class Test__check_response(unittest.TestCase): def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import _check_response + return _check_response(*args, **kw) def test_w_none(self): from google.cloud.streaming.exceptions import RequestError + with self.assertRaises(RequestError): self._call_fut(None) @@ -254,6 +260,7 @@ class Test__reset_http_connections(unittest.TestCase): def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import _reset_http_connections + return _reset_http_connections(*args, **kw) def test_wo_connections(self): @@ -289,6 +296,7 @@ def _verify_requested(self, http, request, def test_defaults_wo_connections(self): from google.cloud._testing import _Monkey from google.cloud.streaming import http_wrapper as MUT + INFO = {'status': '200'} CONTENT = 'CONTENT' _http = _Http((INFO, CONTENT)) @@ -309,6 +317,7 @@ def test_defaults_wo_connections(self): def test_w_http_connections_miss(self): from google.cloud._testing import _Monkey from google.cloud.streaming import http_wrapper as MUT + INFO = {'status': '200'} CONTENT = 'CONTENT' CONN_TYPE = object() @@ -331,6 +340,7 @@ def test_w_http_connections_miss(self): def test_w_http_connections_hit(self): from google.cloud._testing import _Monkey from google.cloud.streaming import http_wrapper as MUT + INFO = {'status': '200'} CONTENT = 'CONTENT' CONN_TYPE = object() @@ -354,6 +364,7 @@ def test_w_request_returning_None(self): from google.cloud._testing import _Monkey from google.cloud.streaming import http_wrapper as MUT from google.cloud.streaming.exceptions import RequestError + INFO = None CONTENT = None CONN_TYPE = object() @@ -371,6 +382,7 @@ class Test_make_api_request(unittest.TestCase): def _call_fut(self, *args, **kw): from google.cloud.streaming.http_wrapper import make_api_request + return make_api_request(*args, **kw) def test_wo_exception(self): @@ -424,6 +436,7 @@ def _wo_exception(*args, **kw): def test_w_exceptions_gt_max_retries(self): from google.cloud._testing import _Monkey from google.cloud.streaming import http_wrapper as MUT + HTTP = object() REQUEST = _Request() _created, _checked = [], [] diff --git a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py 
b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py index c0c5ff375a96..47820078447d 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py @@ -20,6 +20,7 @@ class Test_StreamSlice(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.stream_slice import StreamSlice + return StreamSlice def _make_one(self, *args, **kw): @@ -27,6 +28,7 @@ def _make_one(self, *args, **kw): def test_ctor(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 stream = BytesIO(CONTENT) @@ -39,6 +41,7 @@ def test_ctor(self): def test___nonzero___empty(self): from io import BytesIO + CONTENT = b'' MAXSIZE = 0 stream = BytesIO(CONTENT) @@ -47,6 +50,7 @@ def test___nonzero___empty(self): def test___nonzero___nonempty(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 stream = BytesIO(CONTENT) @@ -56,6 +60,7 @@ def test___nonzero___nonempty(self): def test_read_exhausted(self): from io import BytesIO from six.moves import http_client + CONTENT = b'' MAXSIZE = 4 stream = BytesIO(CONTENT) @@ -65,6 +70,7 @@ def test_read_exhausted(self): def test_read_implicit_size(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 stream = BytesIO(CONTENT) @@ -74,6 +80,7 @@ def test_read_implicit_size(self): def test_read_explicit_size(self): from io import BytesIO + CONTENT = b'CONTENT GOES HERE' MAXSIZE = 4 SIZE = 3 diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py index aa4fd2e546ad..8bafd4a1cc47 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_transfer.py @@ -21,6 +21,7 @@ class Test__Transfer(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.transfer import _Transfer + return _Transfer def _make_one(self, *args, **kw): @@ -28,6 +29,7 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE + stream = _Stream() xfer = self._make_one(stream) self.assertIs(xfer.stream, stream) @@ -96,6 +98,7 @@ def test__initialize_not_already_initialized_w_http(self): def test__initialize_not_already_initialized_wo_http(self): from httplib2 import Http + stream = _Stream() xfer = self._make_one(stream) xfer._initialize(None, self.URL) @@ -114,6 +117,7 @@ def test__initialize_w_existing_http(self): def test__initialize_already_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + URL_2 = 'http://example.com/other' HTTP_1, HTTP_2 = object(), object() stream = _Stream() @@ -131,6 +135,7 @@ def test__ensure_initialized_hit(self): def test__ensure_initialized_miss(self): from google.cloud.streaming.exceptions import TransferInvalidError + stream = _Stream() xfer = self._make_one(stream) with self.assertRaises(TransferInvalidError): @@ -143,6 +148,7 @@ def test__ensure_uninitialized_hit(self): def test__ensure_uninitialized_miss(self): from google.cloud.streaming.exceptions import TransferInvalidError + stream = _Stream() HTTP = object() xfer = self._make_one(stream) @@ -166,6 +172,7 @@ class Test_Download(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.transfer import Download + return Download def _make_one(self, *args, **kw): @@ -197,6 +204,7 @@ def 
test_ctor_w_total_size(self): def test_from_file_w_existing_file_no_override(self): import os from google.cloud._testing import _tempdir + klass = self._get_target_class() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') @@ -208,6 +216,7 @@ def test_from_file_w_existing_file_no_override(self): def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): import os from google.cloud._testing import _tempdir + klass = self._get_target_class() with _tempdir() as tempdir: filename = os.path.join(tempdir, 'file.out') @@ -277,6 +286,7 @@ def test__set_total_w_content_range_w_asterisk_total(self): def test_initialize_download_already_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + request = _Request() download = self._make_one(_Stream()) download._initialize(None, self.URL) @@ -296,6 +306,7 @@ def test_initialize_download_w_autotransfer_failing(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.exceptions import HttpError + request = _Request() http = object() download = self._make_one(_Stream(), auto_transfer=True) @@ -314,6 +325,7 @@ def test_initialize_download_w_autotransfer_w_content_location(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + REDIRECT_URL = 'http://example.com/other' request = _Request() http = object() @@ -335,6 +347,7 @@ def test_initialize_download_w_autotransfer_w_content_location(self): def test__normalize_start_end_w_end_w_start_lt_0(self): from google.cloud.streaming.exceptions import TransferInvalidError + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): @@ -342,6 +355,7 @@ def test__normalize_start_end_w_end_w_start_lt_0(self): def test__normalize_start_end_w_end_w_start_gt_total(self): from google.cloud.streaming.exceptions import TransferInvalidError + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/2'}) @@ -350,6 +364,7 @@ def test__normalize_start_end_w_end_w_start_gt_total(self): def test__normalize_start_end_w_end_lt_start(self): from google.cloud.streaming.exceptions import TransferInvalidError + download = self._make_one(_Stream()) download._set_total({'content-range': 'bytes 0-1/2'}) @@ -422,6 +437,7 @@ def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self): def test__get_chunk_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): @@ -431,6 +447,7 @@ def test__get_chunk(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + http = object() download = self._make_one(_Stream()) download._initialize(http, self.URL) @@ -450,6 +467,7 @@ def test__get_chunk(self): def test__process_response_w_FORBIDDEN(self): from google.cloud.streaming.exceptions import HttpError from six.moves import http_client + download = self._make_one(_Stream()) response = _makeResponse(http_client.FORBIDDEN) with self.assertRaises(HttpError): @@ -458,6 +476,7 @@ def test__process_response_w_FORBIDDEN(self): def test__process_response_w_NOT_FOUND(self): from google.cloud.streaming.exceptions import HttpError from six.moves import http_client + download = self._make_one(_Stream()) response = _makeResponse(http_client.NOT_FOUND) with self.assertRaises(HttpError): @@ -466,6 +485,7 @@ 
def test__process_response_w_NOT_FOUND(self): def test__process_response_w_other_error(self): from google.cloud.streaming.exceptions import TransferRetryError from six.moves import http_client + download = self._make_one(_Stream()) response = _makeResponse(http_client.BAD_REQUEST) with self.assertRaises(TransferRetryError): @@ -473,6 +493,7 @@ def test__process_response_w_other_error(self): def test__process_response_w_OK_wo_encoding(self): from six.moves import http_client + stream = _Stream() download = self._make_one(stream) response = _makeResponse(http_client.OK, content='OK') @@ -484,6 +505,7 @@ def test__process_response_w_OK_wo_encoding(self): def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): from six.moves import http_client + stream = _Stream() download = self._make_one(stream) info = {'content-encoding': 'blah'} @@ -496,6 +518,7 @@ def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): from six.moves import http_client + stream = _Stream() download = self._make_one(stream) response = _makeResponse( @@ -508,6 +531,7 @@ def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): def test__process_response_w_NO_CONTENT(self): from six.moves import http_client + stream = _Stream() download = self._make_one(stream) response = _makeResponse(status_code=http_client.NO_CONTENT) @@ -519,6 +543,7 @@ def test__process_response_w_NO_CONTENT(self): def test_get_range_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): download.get_range(0, 10) @@ -527,6 +552,7 @@ def test_get_range_wo_total_size_complete(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) REQ_RANGE = 'bytes=0-%d' % (LEN,) @@ -554,6 +580,7 @@ def test_get_range_wo_total_size_wo_end(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) START = 5 @@ -583,6 +610,7 @@ def test_get_range_w_total_size_partial(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) PARTIAL_LEN = 5 @@ -613,6 +641,7 @@ def test_get_range_w_empty_chunk(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.exceptions import TransferRetryError + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) START = 5 @@ -643,6 +672,7 @@ def test_get_range_w_total_size_wo_use_chunks(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) CHUNK_SIZE = 3 @@ -671,6 +701,7 @@ def test_get_range_w_multiple_chunks(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDE' LEN = len(CONTENT) CHUNK_SIZE = 3 @@ -705,6 +736,7 @@ def test_get_range_w_multiple_chunks(self): def test_stream_file_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + download = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): @@ -712,6 +744,7 @@ def 
test_stream_file_not_initialized(self): def test_stream_file_w_initial_response_complete(self): from six.moves import http_client + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) @@ -732,6 +765,7 @@ def test_stream_file_w_initial_response_incomplete(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CHUNK_SIZE = 3 CONTENT = b'ABCDEF' LEN = len(CONTENT) @@ -769,6 +803,7 @@ def test_stream_file_wo_initial_response_wo_total_size(self): from six.moves import http_client from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) CHUNK_SIZE = 123 @@ -804,6 +839,7 @@ class Test_Upload(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.streaming.transfer import Upload + return Upload def _make_one(self, stream, mime_type=MIME_TYPE, *args, **kw): @@ -811,6 +847,7 @@ def _make_one(self, stream, mime_type=MIME_TYPE, *args, **kw): def test_ctor_defaults(self): from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE + stream = _Stream() upload = self._make_one(stream) self.assertIs(upload.stream, stream) @@ -840,6 +877,7 @@ def test_from_file_w_nonesuch_file(self): def test_from_file_wo_mimetype_w_unguessable_filename(self): import os from google.cloud._testing import _tempdir + klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE' with _tempdir() as tempdir: @@ -852,6 +890,7 @@ def test_from_file_wo_mimetype_w_unguessable_filename(self): def test_from_file_wo_mimetype_w_guessable_filename(self): import os from google.cloud._testing import _tempdir + klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' with _tempdir() as tempdir: @@ -867,6 +906,7 @@ def test_from_file_wo_mimetype_w_guessable_filename(self): def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): import os from google.cloud._testing import _tempdir + klass = self._get_target_class() CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' CHUNK_SIZE = 3 @@ -924,18 +964,21 @@ def test_strategy_setter_invalid(self): def test_strategy_setter_SIMPLE_UPLOAD(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD self.assertEqual(upload.strategy, SIMPLE_UPLOAD) def test_strategy_setter_RESUMABLE_UPLOAD(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) def test_total_size_setter_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + SIZE = 123 upload = self._make_one(_Stream) http = object() @@ -951,6 +994,7 @@ def test_total_size_setter_not_initialized(self): def test__set_default_strategy_w_existing_strategy(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + config = _Dummy( resumable_path='/resumable/endpoint', simple_multipart=True, @@ -964,6 +1008,7 @@ def test__set_default_strategy_w_existing_strategy(self): def test__set_default_strategy_wo_resumable_path(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + config = _Dummy( resumable_path=None, simple_multipart=True, @@ -977,6 +1022,7 @@ def test__set_default_strategy_wo_resumable_path(self): def test__set_default_strategy_w_total_size_gt_threshhold(self): from google.cloud.streaming.transfer import 
RESUMABLE_UPLOAD_THRESHOLD from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + config = _UploadConfig() request = _Request() upload = self._make_one( @@ -986,6 +1032,7 @@ def test__set_default_strategy_w_total_size_gt_threshhold(self): def test__set_default_strategy_w_body_wo_multipart(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() config.simple_multipart = False @@ -996,6 +1043,7 @@ def test__set_default_strategy_w_body_wo_multipart(self): def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() config.simple_path = None @@ -1006,6 +1054,7 @@ def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): def test__set_default_strategy_w_body_w_multipart_w_simple_path(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' config = _UploadConfig() request = _Request(body=CONTENT) @@ -1034,6 +1083,7 @@ def test_configure_request_w_invalid_mimetype(self): def test_configure_request_w_simple_wo_body(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + CONTENT = b'CONTENT' config = _UploadConfig() request = _Request() @@ -1053,6 +1103,7 @@ def test_configure_request_w_simple_wo_body(self): def test_configure_request_w_simple_w_body(self): from google.cloud._helpers import _to_bytes from google.cloud.streaming.transfer import SIMPLE_UPLOAD + CONTENT = b'CONTENT' BODY = b'BODY' config = _UploadConfig() @@ -1095,6 +1146,7 @@ def test_configure_request_w_simple_w_body(self): def test_configure_request_w_resumable_wo_total_size(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'CONTENT' config = _UploadConfig() request = _Request() @@ -1112,6 +1164,7 @@ def test_configure_request_w_resumable_wo_total_size(self): def test_configure_request_w_resumable_w_total_size(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'CONTENT' LEN = len(CONTENT) config = _UploadConfig() @@ -1132,6 +1185,7 @@ def test_configure_request_w_resumable_w_total_size(self): def test_refresh_upload_state_w_simple_strategy(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD upload.refresh_upload_state() # no-op @@ -1139,6 +1193,7 @@ def test_refresh_upload_state_w_simple_strategy(self): def test_refresh_upload_state_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD with self.assertRaises(TransferInvalidError): @@ -1149,6 +1204,7 @@ def test_refresh_upload_state_w_OK(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) @@ -1176,6 +1232,7 @@ def test_refresh_upload_state_w_CREATED(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) @@ -1203,6 +1260,7 @@ def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): from google.cloud.streaming.http_wrapper import 
RESUME_INCOMPLETE from google.cloud._testing import _Monkey from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) LAST = 5 @@ -1230,6 +1288,7 @@ def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE from google.cloud._testing import _Monkey from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) http = object() @@ -1256,6 +1315,7 @@ def test_refresh_upload_state_w_error(self): from google.cloud.streaming import transfer as MUT from google.cloud.streaming.exceptions import HttpError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' LEN = len(CONTENT) http = object() @@ -1295,6 +1355,7 @@ def test_initialize_upload_no_strategy(self): def test_initialize_upload_simple_w_http(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + request = _Request() upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD @@ -1303,6 +1364,7 @@ def test_initialize_upload_simple_w_http(self): def test_initialize_upload_resumable_already_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + request = _Request() upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD @@ -1316,6 +1378,7 @@ def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): from google.cloud.streaming import transfer as MUT from google.cloud.streaming.exceptions import HttpError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + request = _Request() upload = self._make_one(_Stream()) upload.strategy = RESUMABLE_UPLOAD @@ -1331,6 +1394,7 @@ def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + request = _Request() upload = self._make_one(_Stream(), auto_transfer=False) upload.strategy = RESUMABLE_UPLOAD @@ -1352,6 +1416,7 @@ def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() request = _Request() @@ -1400,6 +1465,7 @@ def test__validate_chunksize_w__server_chunk_granularity_hit(self): def test_stream_file_w_simple_strategy(self): from google.cloud.streaming.transfer import SIMPLE_UPLOAD + upload = self._make_one(_Stream()) upload.strategy = SIMPLE_UPLOAD with self.assertRaises(ValueError): @@ -1407,6 +1473,7 @@ def test_stream_file_w_simple_strategy(self): def test_stream_file_w_use_chunks_invalid_chunk_size(self): from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._make_one(_Stream(), chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 100 @@ -1416,6 +1483,7 @@ def test_stream_file_w_use_chunks_invalid_chunk_size(self): def test_stream_file_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + upload = self._make_one(_Stream(), chunksize=1024) upload.strategy = RESUMABLE_UPLOAD upload._server_chunk_granularity = 128 @@ -1424,6 +1492,7 @@ def test_stream_file_not_initialized(self): def test_stream_file_already_complete_w_unseekable_stream(self): 
from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + http = object() stream = object() response = object() @@ -1438,6 +1507,7 @@ def test_stream_file_already_complete_w_unseekable_stream(self): def test_stream_file_already_complete_w_seekable_stream_unsynced(self): from google.cloud.streaming.exceptions import CommunicationError from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) @@ -1454,6 +1524,7 @@ def test_stream_file_already_complete_w_seekable_stream_unsynced(self): def test_stream_file_already_complete_wo_seekable_method_synced(self): import os from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) @@ -1470,6 +1541,7 @@ def test_stream_file_already_complete_wo_seekable_method_synced(self): def test_stream_file_already_complete_w_seekable_method_true_synced(self): import os from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() stream = _StreamWithSeekableMethod(CONTENT, True) @@ -1486,6 +1558,7 @@ def test_stream_file_already_complete_w_seekable_method_true_synced(self): def test_stream_file_already_complete_w_seekable_method_false(self): import os from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() stream = _StreamWithSeekableMethod(CONTENT, False) @@ -1505,6 +1578,7 @@ def test_stream_file_incomplete(self): from google.cloud.streaming import transfer as MUT from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) @@ -1550,6 +1624,7 @@ def test_stream_file_incomplete_w_transfer_error(self): from google.cloud.streaming.exceptions import CommunicationError from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' http = object() stream = _Stream(CONTENT) @@ -1586,6 +1661,7 @@ def test__send_media_request_wo_error(self): from google.cloud._testing import _Monkey from google.cloud.streaming import transfer as MUT from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE + CONTENT = b'ABCDEFGHIJ' bytes_http = object() stream = _Stream(CONTENT) @@ -1616,6 +1692,7 @@ def test__send_media_request_w_error(self): from google.cloud.streaming.exceptions import HttpError from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE from google.cloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' bytes_http = object() http = object() @@ -1652,12 +1729,14 @@ def test__send_media_request_w_error(self): def test__send_media_body_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + upload = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): upload._send_media_body(0) def test__send_media_body_wo_total_size(self): from google.cloud.streaming.exceptions import TransferInvalidError + http = object() upload = self._make_one(_Stream()) upload._initialize(http, _Request.URL) @@ -1666,6 +1745,7 @@ def test__send_media_body_wo_total_size(self): def test__send_media_body_start_lt_total_size(self): from google.cloud.streaming.stream_slice import StreamSlice + SIZE = 1234 http = object() stream = _Stream() @@ -1693,6 +1773,7 @@ def test__send_media_body_start_lt_total_size(self): def test__send_media_body_start_eq_total_size(self): 
from google.cloud.streaming.stream_slice import StreamSlice + SIZE = 1234 http = object() stream = _Stream() @@ -1720,6 +1801,7 @@ def test__send_media_body_start_eq_total_size(self): def test__send_chunk_not_initialized(self): from google.cloud.streaming.exceptions import TransferInvalidError + upload = self._make_one(_Stream()) with self.assertRaises(TransferInvalidError): upload._send_chunk(0) @@ -1779,6 +1861,7 @@ def test__send_chunk_wo_total_size_stream_not_exhausted(self): def test__send_chunk_w_total_size_stream_not_exhausted(self): from google.cloud.streaming.stream_slice import StreamSlice + CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) CHUNK_SIZE = SIZE - 5 @@ -1810,6 +1893,7 @@ def test__send_chunk_w_total_size_stream_not_exhausted(self): def test__send_chunk_w_total_size_stream_exhausted(self): from google.cloud.streaming.stream_slice import StreamSlice + CONTENT = b'ABCDEFGHIJ' SIZE = len(CONTENT) CHUNK_SIZE = 1000 @@ -1840,12 +1924,15 @@ def test__send_chunk_w_total_size_stream_exhausted(self): def _email_chunk_parser(): import six + if six.PY3: # pragma: NO COVER Python3 from email.parser import BytesParser + parser = BytesParser() return parser.parsebytes else: from email.parser import Parser + parser = Parser() return parser.parsestr @@ -1868,6 +1955,7 @@ class _Stream(object): def __init__(self, to_read=b''): import io + self._written = [] self._to_read = io.BytesIO(to_read) diff --git a/packages/google-cloud-core/unit_tests/streaming/test_util.py b/packages/google-cloud-core/unit_tests/streaming/test_util.py index 1ee1c03d073f..4da788182cb9 100644 --- a/packages/google-cloud-core/unit_tests/streaming/test_util.py +++ b/packages/google-cloud-core/unit_tests/streaming/test_util.py @@ -19,17 +19,20 @@ class Test_calculate_wait_for_retry(unittest.TestCase): def _call_fut(self, *args, **kw): from google.cloud.streaming.util import calculate_wait_for_retry + return calculate_wait_for_retry(*args, **kw) def test_w_negative_jitter_lt_max_wait(self): import random from google.cloud._testing import _Monkey + with _Monkey(random, uniform=lambda lower, upper: lower): self.assertEqual(self._call_fut(1), 1.5) def test_w_positive_jitter_gt_max_wait(self): import random from google.cloud._testing import _Monkey + with _Monkey(random, uniform=lambda lower, upper: upper): self.assertEqual(self._call_fut(4), 20) @@ -38,6 +41,7 @@ class Test_acceptable_mime_type(unittest.TestCase): def _call_fut(self, *args, **kw): from google.cloud.streaming.util import acceptable_mime_type + return acceptable_mime_type(*args, **kw) def test_pattern_wo_slash(self): diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index bc1a4dafb6b7..f9fa5d58ea0f 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -51,6 +51,7 @@ class Test__UTC(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud._helpers import _UTC + return _UTC def _make_one(self): @@ -58,6 +59,7 @@ def _make_one(self): def test_module_property(self): from google.cloud import _helpers as MUT + klass = self._get_target_class() try: import pytz @@ -104,6 +106,7 @@ class Test__ensure_tuple_or_list(unittest.TestCase): def _call_fut(self, arg_name, tuple_or_list): from google.cloud._helpers import _ensure_tuple_or_list + return _ensure_tuple_or_list(arg_name, tuple_or_list) def test_valid_tuple(self): @@ -131,6 +134,7 @@ class Test__determine_default_project(unittest.TestCase): def 
_call_fut(self, project=None): from google.cloud._helpers import _determine_default_project + return _determine_default_project(project=project) def test_it(self): @@ -154,6 +158,7 @@ class Test__millis(unittest.TestCase): def _call_fut(self, value): from google.cloud._helpers import _millis + return _millis(value) def test_one_second_from_epoch(self): @@ -168,6 +173,7 @@ class Test__microseconds_from_datetime(unittest.TestCase): def _call_fut(self, value): from google.cloud._helpers import _microseconds_from_datetime + return _microseconds_from_datetime(value) def test_it(self): @@ -185,6 +191,7 @@ class Test__millis_from_datetime(unittest.TestCase): def _call_fut(self, value): from google.cloud._helpers import _millis_from_datetime + return _millis_from_datetime(value) def test_w_none(self): @@ -240,6 +247,7 @@ class Test__datetime_from_microseconds(unittest.TestCase): def _call_fut(self, value): from google.cloud._helpers import _datetime_from_microseconds + return _datetime_from_microseconds(value) def test_it(self): @@ -257,10 +265,12 @@ class Test___date_from_iso8601_date(unittest.TestCase): def _call_fut(self, value): from google.cloud._helpers import _date_from_iso8601_date + return _date_from_iso8601_date(value) def test_todays_date(self): import datetime + TODAY = datetime.date.today() self.assertEqual(self._call_fut(TODAY.strftime("%Y-%m-%d")), TODAY) @@ -269,10 +279,12 @@ class Test___time_from_iso8601_time_naive(unittest.TestCase): def _call_fut(self, value): from google.cloud._helpers import _time_from_iso8601_time_naive + return _time_from_iso8601_time_naive(value) def test_todays_date(self): import datetime + WHEN = datetime.time(12, 9, 42) self.assertEqual(self._call_fut(("12:09:42")), WHEN) @@ -281,6 +293,7 @@ class Test__rfc3339_to_datetime(unittest.TestCase): def _call_fut(self, dt_str): from google.cloud._helpers import _rfc3339_to_datetime + return _rfc3339_to_datetime(dt_str) def test_w_bogus_zone(self): @@ -335,6 +348,7 @@ class Test__rfc3339_nanos_to_datetime(unittest.TestCase): def _call_fut(self, dt_str): from google.cloud._helpers import _rfc3339_nanos_to_datetime + return _rfc3339_nanos_to_datetime(dt_str) def test_w_bogus_zone(self): @@ -423,6 +437,7 @@ class Test__datetime_to_rfc3339(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud._helpers import _datetime_to_rfc3339 + return _datetime_to_rfc3339(*args, **kwargs) @staticmethod @@ -471,6 +486,7 @@ class Test__to_bytes(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud._helpers import _to_bytes + return _to_bytes(*args, **kwargs) def test_with_bytes(self): @@ -498,6 +514,7 @@ class Test__bytes_to_unicode(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud._helpers import _bytes_to_unicode + return _bytes_to_unicode(*args, **kwargs) def test_with_bytes(self): @@ -519,6 +536,7 @@ class Test__pb_timestamp_to_datetime(unittest.TestCase): def _call_fut(self, timestamp): from google.cloud._helpers import _pb_timestamp_to_datetime + return _pb_timestamp_to_datetime(timestamp) def test_it(self): @@ -540,6 +558,7 @@ class Test__pb_timestamp_to_rfc3339(unittest.TestCase): def _call_fut(self, timestamp): from google.cloud._helpers import _pb_timestamp_to_rfc3339 + return _pb_timestamp_to_rfc3339(timestamp) def test_it(self): @@ -557,6 +576,7 @@ class Test__datetime_to_pb_timestamp(unittest.TestCase): def _call_fut(self, when): from google.cloud._helpers import _datetime_to_pb_timestamp + return _datetime_to_pb_timestamp(when) def test_it(self): 
@@ -603,6 +623,7 @@ def test_w_mismatched_project(self): def test_w_valid_data_w_compiled_regex(self): import re + template = re.compile(self.TEMPLATE) PATH = 'projects/%s/things/%s' % (self.PROJECT, self.THING_NAME) name = self._call_fut(PATH, self.PROJECT, template) @@ -620,6 +641,7 @@ class Test_make_secure_channel(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud._helpers import make_secure_channel + return make_secure_channel(*args, **kwargs) def test_it(self): @@ -676,6 +698,7 @@ class Test_make_secure_stub(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud._helpers import make_secure_stub + return make_secure_stub(*args, **kwargs) def test_it(self): @@ -714,6 +737,7 @@ class Test_make_insecure_stub(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud._helpers import make_insecure_stub + return make_insecure_stub(*args, **kwargs) def _helper(self, target, host, port=None): diff --git a/packages/google-cloud-core/unit_tests/test__http.py b/packages/google-cloud-core/unit_tests/test__http.py index b27f0240d82a..23a198d1f68b 100644 --- a/packages/google-cloud-core/unit_tests/test__http.py +++ b/packages/google-cloud-core/unit_tests/test__http.py @@ -62,6 +62,7 @@ def test_http_w_existing(self): def test_http_wo_creds(self): import httplib2 + conn = self._make_one() self.assertIsInstance(conn.http, httplib2.Http) @@ -78,6 +79,7 @@ def test_http_w_creds(self): def test_user_agent_format(self): from pkg_resources import get_distribution + expected_ua = 'gcloud-python/{0}'.format( get_distribution('google-cloud-core').version) conn = self._make_one() @@ -123,6 +125,7 @@ def test_http_w_existing(self): def test_http_wo_creds(self): import httplib2 + conn = self._make_one() self.assertIsInstance(conn.http, httplib2.Http) @@ -151,6 +154,7 @@ def test_build_api_url_no_extra_query_params(self): def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit + conn = self._makeMockOne() uri = conn.build_api_url('/foo', {'bar': 'baz'}) @@ -271,6 +275,7 @@ def test_api_request_wo_json_expected(self): def test_api_request_w_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit + conn = self._makeMockOne() http = conn._http = _Http( {'status': '200', 'content-type': 'application/json'}, @@ -301,6 +306,7 @@ def test_api_request_w_query_params(self): def test_api_request_w_headers(self): from six.moves.urllib.parse import urlsplit + conn = self._makeMockOne() http = conn._http = _Http( {'status': '200', 'content-type': 'application/json'}, @@ -332,6 +338,7 @@ def test_api_request_w_headers(self): def test_api_request_w_data(self): import json + DATA = {'foo': 'bar'} DATAJ = json.dumps(DATA) conn = self._makeMockOne() @@ -360,6 +367,7 @@ def test_api_request_w_data(self): def test_api_request_w_404(self): from google.cloud.exceptions import NotFound + conn = self._makeMockOne() conn._http = _Http( {'status': '404', 'content-type': 'text/plain'}, @@ -369,6 +377,7 @@ def test_api_request_w_404(self): def test_api_request_w_500(self): from google.cloud.exceptions import InternalServerError + conn = self._makeMockOne() conn._http = _Http( {'status': '500', 'content-type': 'text/plain'}, @@ -408,6 +417,7 @@ class _Http(object): def __init__(self, headers, content): from httplib2 import Response + self._response = Response(headers) self._content = content diff --git 
a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index dd1075aae5f8..a503891bc0c6 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -27,6 +28,7 @@ class Test_ClientFactoryMixin(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.client import _ClientFactoryMixin + return _ClientFactoryMixin def test_virtual(self): @@ -39,6 +41,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.client import Client + return Client def _make_one(self, *args, **kw): @@ -106,6 +109,7 @@ class TestJSONClient(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.client import JSONClient + return JSONClient def _make_one(self, *args, **kw): diff --git a/packages/google-cloud-core/unit_tests/test_credentials.py b/packages/google-cloud-core/unit_tests/test_credentials.py index 6489dd19c4dd..53370a061494 100644 --- a/packages/google-cloud-core/unit_tests/test_credentials.py +++ b/packages/google-cloud-core/unit_tests/test_credentials.py @@ -21,6 +21,7 @@ class Test_get_credentials(unittest.TestCase): def _call_fut(self): from google.cloud import credentials + return credentials.get_credentials() def test_it(self): @@ -37,6 +38,7 @@ class Test_generate_signed_url(unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud.credentials import generate_signed_url + return generate_signed_url(*args, **kwargs) def _generate_helper(self, response_type=None, response_disposition=None, @@ -108,6 +110,7 @@ def test_with_google_credentials(self): import time import google.auth.credentials from google.cloud.credentials import generate_signed_url + RESOURCE = '/name/path' credentials = mock.Mock(spec=google.auth.credentials.Credentials) @@ -120,6 +123,7 @@ class Test__get_signed_query_params(unittest.TestCase): def _call_fut(self, credentials, expiration, string_to_sign): from google.cloud.credentials import _get_signed_query_params + return _get_signed_query_params(credentials, expiration, string_to_sign) @@ -149,10 +153,12 @@ class Test__get_expiration_seconds(unittest.TestCase): def _call_fut(self, expiration): from google.cloud.credentials import _get_expiration_seconds + return _get_expiration_seconds(expiration) def _utc_seconds(self, when): import calendar + return int(calendar.timegm(when.timetuple())) def test_w_invalid(self): diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index 36cdc3f3e360..4ab41cb8d576 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -20,6 +20,7 @@ class Test_GoogleCloudError(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.exceptions import GoogleCloudError + return GoogleCloudError def _make_one(self, message, errors=()): @@ -51,11 +52,13 @@ class Test_make_exception(unittest.TestCase): def _call_fut(self, response, content, error_info=None, use_json=True): from google.cloud.exceptions import make_exception + return make_exception(response, content, error_info=error_info, use_json=use_json) def test_hit_w_content_as_str(self): from google.cloud.exceptions import NotFound + response = _Response(404) content = b'{"error": 
{"message": "Not Found"}}' exception = self._call_fut(response, content) @@ -67,6 +70,7 @@ def test_hit_w_content_as_unicode(self): import six from google.cloud._helpers import _to_bytes from google.cloud.exceptions import NotFound + error_message = u'That\u2019s not found.' expected = u'404 %s' % (error_message,) @@ -88,6 +92,7 @@ def test_hit_w_content_as_unicode_as_py3(self): import six from google.cloud._testing import _Monkey from google.cloud.exceptions import NotFound + error_message = u'That is not found.' expected = u'404 %s' % (error_message,) @@ -103,6 +108,7 @@ def test_hit_w_content_as_unicode_as_py3(self): def test_miss_w_content_as_dict(self): from google.cloud.exceptions import GoogleCloudError + ERROR = { 'domain': 'global', 'location': 'test', @@ -119,6 +125,7 @@ def test_miss_w_content_as_dict(self): def test_html_when_json_expected(self): from google.cloud.exceptions import NotFound + response = _Response(NotFound.code) content = '404 Not Found' exception = self._call_fut(response, content, use_json=True) diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/unit_tests/test_iterator.py index 7f10ea47f104..a7d9e4f0924d 100644 --- a/packages/google-cloud-core/unit_tests/test_iterator.py +++ b/packages/google-cloud-core/unit_tests/test_iterator.py @@ -19,6 +19,7 @@ class Test__do_nothing_page_start(unittest.TestCase): def _call_fut(self, iterator, page, response): from google.cloud.iterator import _do_nothing_page_start + return _do_nothing_page_start(iterator, page, response) def test_do_nothing(self): @@ -31,6 +32,7 @@ class TestPage(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.iterator import Page + return Page def _make_one(self, *args, **kw): @@ -95,6 +97,7 @@ class TestIterator(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.iterator import Iterator + return Iterator def _make_one(self, *args, **kw): @@ -238,6 +241,7 @@ class TestHTTPIterator(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.iterator import HTTPIterator + return HTTPIterator def _make_one(self, *args, **kw): @@ -471,6 +475,7 @@ class TestGAXIterator(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.iterator import GAXIterator + return GAXIterator def _make_one(self, *args, **kw): diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/unit_tests/test_operation.py index 41c469ba336d..375484d1d60c 100644 --- a/packages/google-cloud-core/unit_tests/test_operation.py +++ b/packages/google-cloud-core/unit_tests/test_operation.py @@ -19,6 +19,7 @@ class Test__compute_type_url(unittest.TestCase): def _call_fut(self, klass, prefix=None): from google.cloud.operation import _compute_type_url + if prefix is None: return _compute_type_url(klass) return _compute_type_url(klass, prefix) @@ -35,6 +36,7 @@ def test_wo_prefix(self): def test_w_prefix(self): from google.protobuf.struct_pb2 import Struct + PREFIX = 'test.google-cloud-python.com' type_url = self._call_fut(Struct, PREFIX) @@ -48,6 +50,7 @@ class Test_register_type(unittest.TestCase): def _call_fut(self, klass, type_url=None): from google.cloud.operation import register_type + register_type(klass, type_url=type_url) def test_explicit(self): @@ -110,6 +113,7 @@ class TestOperation(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.operation import Operation + return Operation def _make_one(self, *args, **kw): @@ -144,6 +148,7 @@ def 
test_ctor_explicit(self): def test_from_pb_wo_metadata_or_kw(self): from google.longrunning import operations_pb2 + client = _Client() operation_pb = operations_pb2.Operation(name=self.OPERATION_NAME) klass = self._get_target_class() @@ -345,7 +350,6 @@ def test__update_state_metadata(self): def test__update_state_error(self): from google.longrunning import operations_pb2 from google.rpc.status_pb2 import Status - from google.cloud._testing import _Monkey operation = self._make_one(None, None) self.assertIsNone(operation.error) From 56a2161bb443ca57e751f69c98befd91b3a0c1cb Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 20 Jan 2017 11:08:42 -0800 Subject: [PATCH 110/468] Moving Bigtable helpers for duration protobufs into core. --- .../google/cloud/_helpers.py | 47 +++++++++++++ .../unit_tests/test__helpers.py | 67 +++++++++++++++++++ 2 files changed, 114 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 03f6d2fdb60c..f2fceb8526ed 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -27,6 +27,7 @@ from threading import local as Local import google.auth +from google.protobuf import duration_pb2 from google.protobuf import timestamp_pb2 import google_auth_httplib2 @@ -424,6 +425,52 @@ def _datetime_to_pb_timestamp(when): return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) +def _timedelta_to_duration_pb(timedelta_val): + """Convert a Python timedelta object to a duration protobuf. + + .. note:: + + The Python timedelta has a granularity of microseconds while + the protobuf duration type has a duration of nanoseconds. + + :type timedelta_val: :class:`datetime.timedelta` + :param timedelta_val: A timedelta object. + + :rtype: :class:`google.protobuf.duration_pb2.Duration` + :returns: A duration object equivalent to the time delta. + """ + seconds_decimal = timedelta_val.total_seconds() + # Truncate the parts other than the integer. + seconds = int(seconds_decimal) + if seconds_decimal < 0: + signed_micros = timedelta_val.microseconds - 10**6 + else: + signed_micros = timedelta_val.microseconds + # Convert nanoseconds to microseconds. + nanos = 1000 * signed_micros + return duration_pb2.Duration(seconds=seconds, nanos=nanos) + + +def _duration_pb_to_timedelta(duration_pb): + """Convert a duration protobuf to a Python timedelta object. + + .. note:: + + The Python timedelta has a granularity of microseconds while + the protobuf duration type has a duration of nanoseconds. + + :type duration_pb: :class:`google.protobuf.duration_pb2.Duration` + :param duration_pb: A protobuf duration object. + + :rtype: :class:`datetime.timedelta` + :returns: The converted timedelta object. + """ + return datetime.timedelta( + seconds=duration_pb.seconds, + microseconds=(duration_pb.nanos / 1000.0), + ) + + def _name_from_project_path(path, project, template): """Validate a URI path and get the leaf object's name. 
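The two duration helpers added above do the sign handling that makes the
round trip with ``datetime.timedelta`` work, since ``timedelta`` normalizes
negative microseconds into its ``days``/``seconds``/``microseconds`` fields.
As a hedged, standalone sketch (not part of the patch, returning a plain
``(seconds, nanos)`` tuple instead of ``duration_pb2.Duration`` so it runs
without protobuf installed), the same arithmetic looks like::

    # Standalone illustration of the conversion logic in
    # _timedelta_to_duration_pb; the helper name here is made up.
    import datetime

    def timedelta_to_seconds_nanos(delta):
        seconds_decimal = delta.total_seconds()
        seconds = int(seconds_decimal)  # truncate the fractional part
        if seconds_decimal < 0:
            signed_micros = delta.microseconds - 10**6
        else:
            signed_micros = delta.microseconds
        return seconds, 1000 * signed_micros  # microseconds -> nanoseconds

    # timedelta(seconds=1, microseconds=-5) normalizes to 0.999995 seconds,
    # which comes out as (0, 999995000), matching the unit test below.
    print(timedelta_to_seconds_nanos(
        datetime.timedelta(seconds=1, microseconds=-5)))
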
diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index f9fa5d58ea0f..e0642780c0a4 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -594,6 +594,73 @@ def test_it(self): self.assertEqual(self._call_fut(dt_stamp), timestamp) +class Test__timedelta_to_duration_pb(unittest.TestCase): + + def _call_fut(self, *args, **kwargs): + from google.cloud._helpers import _timedelta_to_duration_pb + + return _timedelta_to_duration_pb(*args, **kwargs) + + def test_it(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = microseconds = 1 + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._call_fut(timedelta_val) + self.assertIsInstance(result, duration_pb2.Duration) + self.assertEqual(result.seconds, seconds) + self.assertEqual(result.nanos, 1000 * microseconds) + + def test_with_negative_microseconds(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = 1 + microseconds = -5 + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._call_fut(timedelta_val) + self.assertIsInstance(result, duration_pb2.Duration) + self.assertEqual(result.seconds, seconds - 1) + self.assertEqual(result.nanos, 10**9 + 1000 * microseconds) + + def test_with_negative_seconds(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = -1 + microseconds = 5 + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._call_fut(timedelta_val) + self.assertIsInstance(result, duration_pb2.Duration) + self.assertEqual(result.seconds, seconds + 1) + self.assertEqual(result.nanos, -(10**9 - 1000 * microseconds)) + + +class Test__duration_pb_to_timedelta(unittest.TestCase): + + def _call_fut(self, *args, **kwargs): + from google.cloud._helpers import _duration_pb_to_timedelta + + return _duration_pb_to_timedelta(*args, **kwargs) + + def test_it(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = microseconds = 1 + duration_pb = duration_pb2.Duration(seconds=seconds, + nanos=1000 * microseconds) + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._call_fut(duration_pb) + self.assertIsInstance(result, datetime.timedelta) + self.assertEqual(result, timedelta_val) + + class Test__name_from_project_path(unittest.TestCase): PROJECT = 'PROJECT' From d04a8440c3d74deef0c6b521f9885a3f4b693067 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 13:00:00 -0800 Subject: [PATCH 111/468] Using empty-tuple default as extra_options. --- packages/google-cloud-core/google/cloud/_helpers.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index f2fceb8526ed..22ed39b24ffc 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -512,7 +512,7 @@ def _name_from_project_path(path, project, template): return match.group('name') -def make_secure_channel(credentials, user_agent, host, extra_options=None): +def make_secure_channel(credentials, user_agent, host, extra_options=()): """Makes a secure channel for an RPC service. Uses / depends on gRPC. 
@@ -538,10 +538,7 @@ def make_secure_channel(credentials, user_agent, host, extra_options=None): http_request = google_auth_httplib2.Request(http=httplib2.Http()) user_agent_option = ('grpc.primary_user_agent', user_agent) - if extra_options is not None: - options = (user_agent_option,) + extra_options - else: - options = (user_agent_option,) + options = (user_agent_option,) + extra_options return google.auth.transport.grpc.secure_authorized_channel( credentials, http_request, @@ -550,7 +547,7 @@ def make_secure_channel(credentials, user_agent, host, extra_options=None): def make_secure_stub(credentials, user_agent, stub_class, host, - extra_options=None): + extra_options=()): """Makes a secure stub for an RPC service. Uses / depends on gRPC. From cb51a6ede106e2bed63b2e375dee08b03d092328 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 13:13:02 -0800 Subject: [PATCH 112/468] Renaming JSONClient -> ClientWithProject. Done via: $ git grep -l JSONClient | xargs sed -i s/JSONClient/ClientWithProject/g Also fixing test b0rken by previous commit. --- packages/google-cloud-core/google/cloud/client.py | 7 ++----- packages/google-cloud-core/unit_tests/test__helpers.py | 2 +- packages/google-cloud-core/unit_tests/test_client.py | 6 +++--- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 7a14e03f763a..01dec6498b9e 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -129,11 +129,8 @@ def _determine_default(project): return _determine_default_project(project) -class JSONClient(Client, _ClientProjectMixin): - """Client for Google JSON-based API. - - Assumes such APIs use the ``project`` and the client needs to store this - value. +class ClientWithProject(Client, _ClientProjectMixin): + """Client that also stores a project. :type project: str :param project: the project which the client acts on behalf of. 
If not diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index e0642780c0a4..ced80e92278f 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -789,7 +789,7 @@ def mock_channel(*args, **kwargs): credentials = object() user_agent = 'you-sir-age-int' host = 'localhost' - extra_options = {'extra_options': None} + extra_options = {'extra_options': ()} with _Monkey(MUT, make_secure_channel=mock_channel): stub = self._call_fut(credentials, user_agent, stub_class, host) diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index a503891bc0c6..e3e58f238d85 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -104,13 +104,13 @@ def test_from_service_account_json_bad_args(self): mock.sentinel.filename, credentials=mock.sentinel.credentials) -class TestJSONClient(unittest.TestCase): +class TestClientWithProject(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.client import JSONClient + from google.cloud.client import ClientWithProject - return JSONClient + return ClientWithProject def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) From cc44b7896b35fab344d907a192cc2447fabcb01d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 15:15:44 -0800 Subject: [PATCH 113/468] Moving _http and _credentials default values from base connection to client. Still leaving proxy properties on Connection to point to the http and credentials objects owned by the client. --- .../google-cloud-core/google/cloud/_http.py | 56 +----- .../google-cloud-core/google/cloud/client.py | 44 ++++- .../unit_tests/test__http.py | 180 +++++++----------- .../unit_tests/test_client.py | 34 +++- 4 files changed, 145 insertions(+), 169 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index c68958e356a6..c86d00fb586e 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -20,8 +20,6 @@ from six.moves.urllib.parse import urlencode import google.auth.credentials -import google_auth_httplib2 -import httplib2 from google.cloud.exceptions import make_exception @@ -37,50 +35,14 @@ class Connection(object): """A generic connection to Google Cloud Platform. - Subclasses should understand only the basic types in method arguments, - however they should be capable of returning advanced types. - - If no value is passed in for ``http``, a :class:`httplib2.Http` object - will be created and authorized with the ``credentials``. If not, the - ``credentials`` and ``http`` need not be related. - - Subclasses may seek to use the private key from ``credentials`` to sign - data. - - A custom (non-``httplib2``) HTTP object must have a ``request`` method - which accepts the following arguments: - - * ``uri`` - * ``method`` - * ``body`` - * ``headers`` - - In addition, ``redirections`` and ``connection_type`` may be used. - - Without the use of ``credentials.authorize(http)``, a custom ``http`` - object will also need to be able to add a bearer token to API - requests and handle token refresh on 401 errors. - - :type credentials: :class:`google.auth.credentials.Credentials` or - :class:`NoneType` - :param credentials: The credentials to use for this connection. 
- - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. + :type client: :class:`~google.cloud.client.Client` + :param client: The client that owns the credentials. """ USER_AGENT = DEFAULT_USER_AGENT - SCOPE = None - """The scopes required for authenticating with a service. - - Needs to be set by subclasses. - """ - - def __init__(self, credentials=None, http=None): - self._http = http - self._credentials = google.auth.credentials.with_scopes_if_required( - credentials, self.SCOPE) + def __init__(self, client): + self._client = client @property def credentials(self): @@ -90,7 +52,7 @@ def credentials(self): :class:`NoneType` :returns: The credentials object associated with this connection. """ - return self._credentials + return self._client._credentials @property def http(self): @@ -99,13 +61,7 @@ def http(self): :rtype: :class:`httplib2.Http` :returns: A Http object used to transport data. """ - if self._http is None: - if self._credentials: - self._http = google_auth_httplib2.AuthorizedHttp( - self._credentials) - else: - self._http = httplib2.Http() - return self._http + return self._client._http class JSONConnection(Connection): diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 01dec6498b9e..d3f9c2f411f9 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -16,6 +16,7 @@ import google.auth.credentials from google.oauth2 import service_account +import google_auth_httplib2 import six from google.cloud._helpers import _determine_default_project @@ -74,6 +75,26 @@ class Client(_ClientFactoryMixin): Stores ``credentials`` and ``http`` object so that subclasses can pass them along to a connection class. + If no value is passed in for ``http``, a :class:`httplib2.Http` object + will be created and authorized with the ``credentials``. If not, the + ``credentials`` and ``http`` need not be related. + + Callers and subclasses may seek to use the private key from + ``credentials`` to sign data. + + A custom (non-``httplib2``) HTTP object must have a ``request`` method + which accepts the following arguments: + + * ``uri`` + * ``method`` + * ``body`` + * ``headers`` + + In addition, ``redirections`` and ``connection_type`` may be used. + + A custom ``http`` object will also need to be able to add a bearer token + to API requests and handle token refresh on 401 errors. + :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this client. If not passed (and if no ``http`` object is @@ -88,6 +109,12 @@ class Client(_ClientFactoryMixin): ``credentials`` for the current object. """ + _SCOPE = None + """The scopes required for authenticating with a service. + + Needs to be set by subclasses. + """ + def __init__(self, credentials=None, http=None): if (credentials is not None and not isinstance( @@ -95,8 +122,21 @@ def __init__(self, credentials=None, http=None): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and http is None: credentials = get_credentials() - self._credentials = credentials - self._http = http + self._credentials = google.auth.credentials.with_scopes_if_required( + credentials, self._SCOPE) + self._http_internal = http + + @property + def _http(self): + """Getter for object used for HTTP transport. + + :rtype: :class:`~httplib2.Http` + :returns: An HTTP object. 
+ """ + if self._http_internal is None: + self._http_internal = google_auth_httplib2.AuthorizedHttp( + self._credentials) + return self._http_internal class _ClientProjectMixin(object): diff --git a/packages/google-cloud-core/unit_tests/test__http.py b/packages/google-cloud-core/unit_tests/test__http.py index 23a198d1f68b..28e44045f976 100644 --- a/packages/google-cloud-core/unit_tests/test__http.py +++ b/packages/google-cloud-core/unit_tests/test__http.py @@ -28,61 +28,27 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor_defaults(self): - conn = self._make_one() - self.assertIsNone(conn.credentials) + def test_constructor(self): + client = object() + conn = self._make_one(client) + self.assertIs(conn._client, client) - def test_ctor_explicit(self): - import google.auth.credentials + def test_credentials_property(self): + client = mock.Mock(spec=['_credentials']) + conn = self._make_one(client) + self.assertIs(conn.credentials, client._credentials) - credentials = mock.Mock(spec=google.auth.credentials.Scoped) - - conn = self._make_one(credentials) - - credentials.with_scopes.assert_called_once_with(conn.SCOPE) - self.assertIs(conn.credentials, credentials.with_scopes.return_value) - self.assertIsNone(conn._http) - - def test_ctor_explicit_http(self): - http = object() - conn = self._make_one(http=http) - self.assertIsNone(conn.credentials) - self.assertIs(conn.http, http) - - def test_ctor_credentials_wo_create_scoped(self): - credentials = object() - conn = self._make_one(credentials) - self.assertIs(conn.credentials, credentials) - self.assertIsNone(conn._http) - - def test_http_w_existing(self): - conn = self._make_one() - conn._http = http = object() - self.assertIs(conn.http, http) - - def test_http_wo_creds(self): - import httplib2 - - conn = self._make_one() - self.assertIsInstance(conn.http, httplib2.Http) - - def test_http_w_creds(self): - import google.auth.credentials - import google_auth_httplib2 - - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - - conn = self._make_one(credentials) - - self.assertIsInstance(conn.http, google_auth_httplib2.AuthorizedHttp) - self.assertIs(conn.http.credentials, credentials) + def test_http_property(self): + client = mock.Mock(spec=['_http']) + conn = self._make_one(client) + self.assertIs(conn.http, client._http) def test_user_agent_format(self): from pkg_resources import get_distribution expected_ua = 'gcloud-python/{0}'.format( get_distribution('google-cloud-core').version) - conn = self._make_one() + conn = self._make_one(object()) self.assertEqual(conn.USER_AGENT, expected_ua) @@ -97,7 +63,7 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def _makeMockOne(self, *args, **kw): + def _make_mock_one(self, *args, **kw): class MockConnection(self._get_target_class()): API_URL_TEMPLATE = '{api_base_url}/mock/{api_version}{path}' API_BASE_URL = 'http://mock' @@ -110,38 +76,14 @@ def test_class_defaults(self): self.assertIsNone(klass.API_BASE_URL) self.assertIsNone(klass.API_VERSION) - def test_ctor_defaults(self): - conn = self._make_one() - self.assertIsNone(conn.credentials) - - def test_ctor_explicit(self): - conn = self._make_one(mock.sentinel.credentials) - self.assertIs(conn.credentials, mock.sentinel.credentials) - - def test_http_w_existing(self): - conn = self._make_one() - conn._http = http = object() - self.assertIs(conn.http, http) - - def test_http_wo_creds(self): - import httplib2 - 
- conn = self._make_one() - self.assertIsInstance(conn.http, httplib2.Http) - - def test_http_w_creds(self): - import google.auth.credentials - import google_auth_httplib2 - - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - - conn = self._make_one(credentials) - - self.assertIsInstance(conn.http, google_auth_httplib2.AuthorizedHttp) - self.assertIs(conn.http.credentials, credentials) + def test_constructor(self): + client = object() + conn = self._make_one(client) + self.assertIs(conn._client, client) def test_build_api_url_no_extra_query_params(self): - conn = self._makeMockOne() + client = object() + conn = self._make_mock_one(client) # Intended to emulate self.mock_template URI = '/'.join([ conn.API_BASE_URL, @@ -155,7 +97,8 @@ def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit - conn = self._makeMockOne() + client = object() + conn = self._make_mock_one(client) uri = conn.build_api_url('/foo', {'bar': 'baz'}) scheme, netloc, path, qs, _ = urlsplit(uri) @@ -172,12 +115,13 @@ def test_build_api_url_w_extra_query_params(self): self.assertEqual(parms['bar'], 'baz') def test__make_request_no_data_no_content_type_no_headers(self): - conn = self._make_one() - URI = 'http://example.com/test' - http = conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'text/plain'}, b'', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) + URI = 'http://example.com/test' headers, content = conn._make_request('GET', URI) self.assertEqual(headers['status'], '200') self.assertEqual(headers['content-type'], 'text/plain') @@ -193,12 +137,13 @@ def test__make_request_no_data_no_content_type_no_headers(self): self.assertEqual(http._called_with['headers'], expected_headers) def test__make_request_w_data_no_extra_headers(self): - conn = self._make_one() - URI = 'http://example.com/test' - http = conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'text/plain'}, b'', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) + URI = 'http://example.com/test' conn._make_request('GET', URI, {}, 'application/json') self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -212,12 +157,13 @@ def test__make_request_w_data_no_extra_headers(self): self.assertEqual(http._called_with['headers'], expected_headers) def test__make_request_w_extra_headers(self): - conn = self._make_one() - URI = 'http://example.com/test' - http = conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'text/plain'}, b'', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_one(client) + URI = 'http://example.com/test' conn._make_request('GET', URI, headers={'X-Foo': 'foo'}) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -231,18 +177,19 @@ def test__make_request_w_extra_headers(self): self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_defaults(self): + http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) PATH = '/path/required' - conn = self._makeMockOne() # Intended to emulate self.mock_template URI = '/'.join([ conn.API_BASE_URL, 'mock', '%s%s' % (conn.API_VERSION, PATH), ]) - http = conn._http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - 
) self.assertEqual(conn.api_request('GET', PATH), {}) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -255,20 +202,22 @@ def test_api_request_defaults(self): self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_w_non_json_response(self): - conn = self._makeMockOne() - conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'text/plain'}, b'CONTENT', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) self.assertRaises(TypeError, conn.api_request, 'GET', '/') def test_api_request_wo_json_expected(self): - conn = self._makeMockOne() - conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'text/plain'}, b'CONTENT', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) self.assertEqual(conn.api_request('GET', '/', expect_json=False), b'CONTENT') @@ -276,11 +225,12 @@ def test_api_request_w_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit - conn = self._makeMockOne() - http = conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'application/json'}, b'{}', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) self.assertEqual(http._called_with['method'], 'GET') uri = http._called_with['uri'] @@ -307,11 +257,12 @@ def test_api_request_w_query_params(self): def test_api_request_w_headers(self): from six.moves.urllib.parse import urlsplit - conn = self._makeMockOne() - http = conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'application/json'}, b'{}', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) self.assertEqual( conn.api_request('GET', '/', headers={'X-Foo': 'bar'}), {}) self.assertEqual(http._called_with['method'], 'GET') @@ -341,7 +292,12 @@ def test_api_request_w_data(self): DATA = {'foo': 'bar'} DATAJ = json.dumps(DATA) - conn = self._makeMockOne() + http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) # Intended to emulate self.mock_template URI = '/'.join([ conn.API_BASE_URL, @@ -349,10 +305,6 @@ def test_api_request_w_data(self): conn.API_VERSION, '', ]) - http = conn._http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) self.assertEqual(conn.api_request('POST', '/', data=DATA), {}) self.assertEqual(http._called_with['method'], 'POST') self.assertEqual(http._called_with['uri'], URI) @@ -368,29 +320,33 @@ def test_api_request_w_data(self): def test_api_request_w_404(self): from google.cloud.exceptions import NotFound - conn = self._makeMockOne() - conn._http = _Http( + http = _Http( {'status': '404', 'content-type': 'text/plain'}, b'{}' ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) self.assertRaises(NotFound, conn.api_request, 'GET', '/') def test_api_request_w_500(self): from google.cloud.exceptions import InternalServerError - conn = self._makeMockOne() - conn._http = _Http( + http = _Http( {'status': '500', 'content-type': 'text/plain'}, b'{}', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) self.assertRaises(InternalServerError, conn.api_request, 'GET', '/') def test_api_request_non_binary_response(self): - conn = self._makeMockOne() - 
http = conn._http = _Http( + http = _Http( {'status': '200', 'content-type': 'application/json'}, u'{}', ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) + result = conn.api_request('GET', '/') # Intended to emulate self.mock_template URI = '/'.join([ diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/unit_tests/test_client.py index e3e58f238d85..d76e3d776bfe 100644 --- a/packages/google-cloud-core/unit_tests/test_client.py +++ b/packages/google-cloud-core/unit_tests/test_client.py @@ -62,7 +62,7 @@ def mock_get_credentials(): client_obj = self._make_one() self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIsNone(client_obj._http) + self.assertIsNone(client_obj._http_internal) self.assertEqual(FUNC_CALLS, ['get_credentials']) def test_ctor_explicit(self): @@ -71,7 +71,7 @@ def test_ctor_explicit(self): client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIs(client_obj._http, HTTP) + self.assertIs(client_obj._http_internal, HTTP) def test_ctor_bad_credentials(self): CREDENTIALS = object() @@ -93,7 +93,7 @@ def test_from_service_account_json(self): self.assertIs( client_obj._credentials, constructor.return_value) - self.assertIsNone(client_obj._http) + self.assertIsNone(client_obj._http_internal) constructor.assert_called_once_with(mock.sentinel.filename) def test_from_service_account_json_bad_args(self): @@ -103,6 +103,30 @@ def test_from_service_account_json_bad_args(self): KLASS.from_service_account_json( mock.sentinel.filename, credentials=mock.sentinel.credentials) + def test__http_property_existing(self): + credentials = _make_credentials() + http = object() + client = self._make_one(credentials=credentials, http=http) + self.assertIs(client._http_internal, http) + self.assertIs(client._http, http) + + def test__http_property_new(self): + credentials = _make_credentials() + client = self._make_one(credentials=credentials) + self.assertIsNone(client._http_internal) + + patch = mock.patch('google_auth_httplib2.AuthorizedHttp', + return_value=mock.sentinel.http) + with patch as mocked: + self.assertIs(client._http, mock.sentinel.http) + # Check the mock. + mocked.assert_called_once_with(credentials) + self.assertEqual(mocked.call_count, 1) + # Make sure the cached value is used on subsequent access. + self.assertIs(client._http_internal, mock.sentinel.http) + self.assertIs(client._http, mock.sentinel.http) + self.assertEqual(mocked.call_count, 1) + class TestClientWithProject(unittest.TestCase): @@ -137,7 +161,7 @@ def mock_get_credentials(): self.assertEqual(client_obj.project, PROJECT) self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIsNone(client_obj._http) + self.assertIsNone(client_obj._http_internal) self.assertEqual( FUNC_CALLS, [(None, '_determine_default_project'), 'get_credentials']) @@ -178,7 +202,7 @@ def _explicit_ctor_helper(self, project): else: self.assertEqual(client_obj.project, project) self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIs(client_obj._http, HTTP) + self.assertIs(client_obj._http_internal, HTTP) def test_ctor_explicit_bytes(self): PROJECT = b'PROJECT' From bdf1682d34e8b76f2d57bcc47581cdff6279c973 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 15:53:28 -0800 Subject: [PATCH 114/468] Changing datastore Connection to only accept client. 
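For context on the core side of this change: `Connection` now accepts
only a client, and its `credentials` / `http` properties proxy to
objects owned by that client. A minimal sketch of the pattern, mirroring
the core unit tests rather than the actual datastore code:

    import mock

    from google.cloud._http import Connection

    # Any object exposing ``_credentials`` and ``_http`` works here;
    # in practice it is a google.cloud.client.Client instance.
    client = mock.Mock(spec=['_credentials', '_http'])
    conn = Connection(client)

    # Both properties proxy to objects owned by the client.
    assert conn.credentials is client._credentials
    assert conn.http is client._http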
--- packages/google-cloud-core/google/cloud/_http.py | 2 +- packages/google-cloud-core/google/cloud/client.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index c86d00fb586e..5bf72386ec79 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -36,7 +36,7 @@ class Connection(object): """A generic connection to Google Cloud Platform. :type client: :class:`~google.cloud.client.Client` - :param client: The client that owns the credentials. + :param client: The client that owns the current connection. """ USER_AGENT = DEFAULT_USER_AGENT diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index d3f9c2f411f9..7fa603b77527 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -109,7 +109,7 @@ class Client(_ClientFactoryMixin): ``credentials`` for the current object. """ - _SCOPE = None + SCOPE = None """The scopes required for authenticating with a service. Needs to be set by subclasses. @@ -123,7 +123,7 @@ def __init__(self, credentials=None, http=None): if credentials is None and http is None: credentials = get_credentials() self._credentials = google.auth.credentials.with_scopes_if_required( - credentials, self._SCOPE) + credentials, self.SCOPE) self._http_internal = http @property From c97228c70cd09104419c124cb6f6eb6d4bef94df Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 21:41:04 -0800 Subject: [PATCH 115/468] Removing unused import after Connection() signature change. --- packages/google-cloud-core/google/cloud/_http.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index 5bf72386ec79..e5d6cd81b239 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -19,8 +19,6 @@ import six from six.moves.urllib.parse import urlencode -import google.auth.credentials - from google.cloud.exceptions import make_exception From 4c6b088761dd05549472988db492309ec8ab5fe6 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Tue, 31 Jan 2017 09:17:12 -0500 Subject: [PATCH 116/468] Updates for pycodestyle. (#2973) --- packages/google-cloud-core/unit_tests/test_exceptions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/unit_tests/test_exceptions.py index 4ab41cb8d576..b3488296eff4 100644 --- a/packages/google-cloud-core/unit_tests/test_exceptions.py +++ b/packages/google-cloud-core/unit_tests/test_exceptions.py @@ -40,7 +40,7 @@ def test_ctor_explicit(self): 'locationType': 'testing', 'message': 'Testing', 'reason': 'test', - } + } e = self._make_one('Testing', [ERROR]) e.code = 600 self.assertEqual(str(e), '600 Testing') @@ -115,7 +115,7 @@ def test_miss_w_content_as_dict(self): 'locationType': 'testing', 'message': 'Testing', 'reason': 'test', - } + } response = _Response(600) content = {"error": {"message": "Unknown Error", "errors": [ERROR]}} exception = self._call_fut(response, content) From 9b61e8c63b25cffe100880f65691d4fd63e0143d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 30 Jan 2017 10:29:42 -0800 Subject: [PATCH 117/468] Cutting google-cloud-core==0.23.0. 
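Among the changes going out in 0.23.0 is the client-owned HTTP
transport: the client now builds an authorized HTTP object lazily and
caches it on first use. A rough sketch of that behavior, mirroring the
new `_http` property and its unit test (illustrative only):

    import mock

    import google.auth.credentials
    from google.cloud.client import Client

    credentials = mock.Mock(spec=google.auth.credentials.Credentials)
    client = Client(credentials=credentials)

    assert client._http_internal is None  # nothing built yet
    http = client._http                   # builds an AuthorizedHttp
    assert client._http is http           # cached for later use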
--- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 4e19fcd13678..0713f23cc27d 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -60,7 +60,7 @@ setup( name='google-cloud-core', - version='0.22.1', + version='0.23.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 89b723185ff08efb05bd55fcebe41ca779dfa57b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 16 Feb 2017 16:41:57 -0800 Subject: [PATCH 118/468] Adding ability to send version info header on HTTP requests. Added an "extra headers" feature to enable this. I am not a fan of changing `Connection()` so haphazardly, but I hope to completely re-factor / destory `Connection()` in the near-term so I am less worried. This only adds the storage and datastore header info, for the purposes of a simple review. Once we agree on the approach, I can add support in the other subpackages. --- .../google-cloud-core/google/cloud/_http.py | 16 +++++++- .../unit_tests/test__http.py | 38 +++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index e5d6cd81b239..e1a481e581a7 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -15,7 +15,9 @@ """Shared implementation of connections to API servers.""" import json +import platform from pkg_resources import get_distribution + import six from six.moves.urllib.parse import urlencode @@ -29,6 +31,10 @@ get_distribution('google-cloud-core').version) """The user agent for google-cloud-python requests.""" +CLIENT_INFO_HEADER = 'X-Goog-API-Client' +CLIENT_INFO_TEMPLATE = ( + 'gl-python/' + platform.python_version() + ' gccl/{}') + class Connection(object): """A generic connection to Google Cloud Platform. @@ -38,6 +44,11 @@ class Connection(object): """ USER_AGENT = DEFAULT_USER_AGENT + _EXTRA_HEADERS = {} + """Headers to be sent with every request. + + Intended to be over-ridden by subclasses. + """ def __init__(self, client): self._client = client @@ -147,7 +158,9 @@ def _make_request(self, method, url, data=None, content_type=None, :param content_type: The proper MIME type of the data provided. :type headers: dict - :param headers: A dictionary of HTTP headers to send with the request. + :param headers: (Optional) A dictionary of HTTP headers to send with + the request. If passed, will be modified directly + here with added headers. :type target_object: object :param target_object: @@ -161,6 +174,7 @@ def _make_request(self, method, url, data=None, content_type=None, returned by :meth:`_do_request`. 
""" headers = headers or {} + headers.update(self._EXTRA_HEADERS) headers['Accept-Encoding'] = 'gzip' if data: diff --git a/packages/google-cloud-core/unit_tests/test__http.py b/packages/google-cloud-core/unit_tests/test__http.py index 28e44045f976..1226042b5859 100644 --- a/packages/google-cloud-core/unit_tests/test__http.py +++ b/packages/google-cloud-core/unit_tests/test__http.py @@ -287,6 +287,44 @@ def test_api_request_w_headers(self): } self.assertEqual(http._called_with['headers'], expected_headers) + def test_api_request_w_extra_headers(self): + from six.moves.urllib.parse import urlsplit + + http = _Http( + {'status': '200', 'content-type': 'application/json'}, + b'{}', + ) + client = mock.Mock(_http=http, spec=['_http']) + conn = self._make_mock_one(client) + conn._EXTRA_HEADERS = { + 'X-Baz': 'dax-quux', + 'X-Foo': 'not-bar', # Collision with ``headers``. + } + self.assertEqual( + conn.api_request('GET', '/', headers={'X-Foo': 'bar'}), {}) + self.assertEqual(http._called_with['method'], 'GET') + uri = http._called_with['uri'] + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + # Intended to emulate self.mock_template + PATH = '/'.join([ + '', + 'mock', + conn.API_VERSION, + '', + ]) + self.assertEqual(path, PATH) + self.assertEqual(qs, '') + self.assertIsNone(http._called_with['body']) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': '0', + 'User-Agent': conn.USER_AGENT, + 'X-Foo': 'not-bar', # The one passed-in is overridden. + 'X-Baz': 'dax-quux', + } + self.assertEqual(http._called_with['headers'], expected_headers) + def test_api_request_w_data(self): import json From 3d5e93316d4b1ab4abafc0f4eacc7dfff91d0075 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Fri, 17 Feb 2017 08:43:25 -0500 Subject: [PATCH 119/468] Core formatting. --- packages/google-cloud-core/unit_tests/test__helpers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/unit_tests/test__helpers.py index ced80e92278f..fcd47f7535bc 100644 --- a/packages/google-cloud-core/unit_tests/test__helpers.py +++ b/packages/google-cloud-core/unit_tests/test__helpers.py @@ -669,6 +669,7 @@ class Test__name_from_project_path(unittest.TestCase): def _call_fut(self, path, project, template): from google.cloud._helpers import _name_from_project_path + return _name_from_project_path(path, project, template) def test_w_invalid_path_length(self): From c95ffb1e776d4b928657cc5a7181f146d3521bfe Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 24 Feb 2017 11:30:18 -0800 Subject: [PATCH 120/468] Upgrading all versions for umbrella release. 
--- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 0713f23cc27d..6ec06329bf76 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -60,7 +60,7 @@ setup( name='google-cloud-core', - version='0.23.0', + version='0.23.1', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 57a265b44acc117f9b28e50ecea46d5917630240 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 Mar 2017 14:49:26 -0700 Subject: [PATCH 121/468] CI Rehash (#3146) --- packages/google-cloud-core/.coveragerc | 8 +- packages/google-cloud-core/.flake8 | 6 + packages/google-cloud-core/LICENSE | 202 ++++++++++++++++++ packages/google-cloud-core/MANIFEST.in | 7 +- packages/google-cloud-core/nox.py | 66 ++++++ packages/google-cloud-core/setup.py | 2 +- packages/google-cloud-core/tests/__init__.py | 0 .../{unit_tests => tests/unit}/__init__.py | 0 .../unit}/streaming/__init__.py | 0 .../unit}/streaming/test_buffered_stream.py | 0 .../unit}/streaming/test_exceptions.py | 0 .../unit}/streaming/test_http_wrapper.py | 0 .../unit}/streaming/test_stream_slice.py | 0 .../unit}/streaming/test_transfer.py | 0 .../unit}/streaming/test_util.py | 0 .../unit}/test__helpers.py | 0 .../{unit_tests => tests/unit}/test__http.py | 0 .../{unit_tests => tests/unit}/test_client.py | 0 .../unit}/test_credentials.py | 0 .../unit}/test_exceptions.py | 0 .../unit}/test_iterator.py | 0 .../unit}/test_operation.py | 0 packages/google-cloud-core/tox.ini | 31 --- 23 files changed, 283 insertions(+), 39 deletions(-) create mode 100644 packages/google-cloud-core/.flake8 create mode 100644 packages/google-cloud-core/LICENSE create mode 100644 packages/google-cloud-core/nox.py create mode 100644 packages/google-cloud-core/tests/__init__.py rename packages/google-cloud-core/{unit_tests => tests/unit}/__init__.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/__init__.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/test_buffered_stream.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/test_exceptions.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/test_http_wrapper.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/test_stream_slice.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/test_transfer.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/streaming/test_util.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test__helpers.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test__http.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test_client.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test_credentials.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test_exceptions.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test_iterator.py (100%) rename packages/google-cloud-core/{unit_tests => tests/unit}/test_operation.py (100%) delete mode 100644 packages/google-cloud-core/tox.ini diff --git a/packages/google-cloud-core/.coveragerc b/packages/google-cloud-core/.coveragerc index dd1524307f5c..9d89b1db5666 100644 --- a/packages/google-cloud-core/.coveragerc +++ 
b/packages/google-cloud-core/.coveragerc @@ -3,9 +3,11 @@ branch = True [report] omit = - */_generated/*.py - # Packages in the "google.cloud" package that we don't own. - */google/cloud/gapic/* + google/cloud/_testing.py + google/cloud/__init__.py + google/cloud/environment_vars.py +fail_under = 100 +show_missing = True exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-core/.flake8 b/packages/google-cloud-core/.flake8 new file mode 100644 index 000000000000..25168dc87605 --- /dev/null +++ b/packages/google-cloud-core/.flake8 @@ -0,0 +1,6 @@ +[flake8] +exclude = + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-core/LICENSE b/packages/google-cloud-core/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-core/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-core/MANIFEST.in b/packages/google-cloud-core/MANIFEST.in index cb3a2b9ef4fa..24aa72fb370b 100644 --- a/packages/google-cloud-core/MANIFEST.in +++ b/packages/google-cloud-core/MANIFEST.in @@ -1,4 +1,3 @@ -include README.rst -graft google -graft unit_tests -global-exclude *.pyc +include README.rst LICENSE +recursive-include unit_tests * +global-exclude *.pyc __pycache__ diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py new file mode 100644 index 000000000000..d8e068f3f00d --- /dev/null +++ b/packages/google-cloud-core/nox.py @@ -0,0 +1,66 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import os + +import nox + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) +def unit_tests(session, python_version): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package in-place. + session.install('mock', 'pytest', 'pytest-cov', + 'grpcio >= 1.0.2') + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run('py.test', '--quiet', + '--cov=google.cloud', '--cov=tests.unit', '--cov-append', + '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + 'tests/unit', + ) + + +@nox.session +def lint(session): + """Run flake8. + + Returns a failure if flake8 finds linting errors or sufficiently + serious code quality issues. + """ + session.interpreter = 'python3.6' + session.install('flake8') + session.install('.') + session.run('flake8', 'google/cloud/core') + + +@nox.session +def cover(session): + """Run the final coverage report. 
+ + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 6ec06329bf76..73c00bf96de4 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -67,7 +67,7 @@ 'google', 'google.cloud', ], - packages=find_packages(), + packages=find_packages(exclude=('unit_tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) diff --git a/packages/google-cloud-core/tests/__init__.py b/packages/google-cloud-core/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-core/unit_tests/__init__.py b/packages/google-cloud-core/tests/unit/__init__.py similarity index 100% rename from packages/google-cloud-core/unit_tests/__init__.py rename to packages/google-cloud-core/tests/unit/__init__.py diff --git a/packages/google-cloud-core/unit_tests/streaming/__init__.py b/packages/google-cloud-core/tests/unit/streaming/__init__.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/__init__.py rename to packages/google-cloud-core/tests/unit/streaming/__init__.py diff --git a/packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py b/packages/google-cloud-core/tests/unit/streaming/test_buffered_stream.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/test_buffered_stream.py rename to packages/google-cloud-core/tests/unit/streaming/test_buffered_stream.py diff --git a/packages/google-cloud-core/unit_tests/streaming/test_exceptions.py b/packages/google-cloud-core/tests/unit/streaming/test_exceptions.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/test_exceptions.py rename to packages/google-cloud-core/tests/unit/streaming/test_exceptions.py diff --git a/packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py b/packages/google-cloud-core/tests/unit/streaming/test_http_wrapper.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/test_http_wrapper.py rename to packages/google-cloud-core/tests/unit/streaming/test_http_wrapper.py diff --git a/packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py b/packages/google-cloud-core/tests/unit/streaming/test_stream_slice.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/test_stream_slice.py rename to packages/google-cloud-core/tests/unit/streaming/test_stream_slice.py diff --git a/packages/google-cloud-core/unit_tests/streaming/test_transfer.py b/packages/google-cloud-core/tests/unit/streaming/test_transfer.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/test_transfer.py rename to packages/google-cloud-core/tests/unit/streaming/test_transfer.py diff --git a/packages/google-cloud-core/unit_tests/streaming/test_util.py b/packages/google-cloud-core/tests/unit/streaming/test_util.py similarity index 100% rename from packages/google-cloud-core/unit_tests/streaming/test_util.py rename to packages/google-cloud-core/tests/unit/streaming/test_util.py diff --git a/packages/google-cloud-core/unit_tests/test__helpers.py b/packages/google-cloud-core/tests/unit/test__helpers.py similarity index 100% rename 
from packages/google-cloud-core/unit_tests/test__helpers.py rename to packages/google-cloud-core/tests/unit/test__helpers.py diff --git a/packages/google-cloud-core/unit_tests/test__http.py b/packages/google-cloud-core/tests/unit/test__http.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test__http.py rename to packages/google-cloud-core/tests/unit/test__http.py diff --git a/packages/google-cloud-core/unit_tests/test_client.py b/packages/google-cloud-core/tests/unit/test_client.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_client.py rename to packages/google-cloud-core/tests/unit/test_client.py diff --git a/packages/google-cloud-core/unit_tests/test_credentials.py b/packages/google-cloud-core/tests/unit/test_credentials.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_credentials.py rename to packages/google-cloud-core/tests/unit/test_credentials.py diff --git a/packages/google-cloud-core/unit_tests/test_exceptions.py b/packages/google-cloud-core/tests/unit/test_exceptions.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_exceptions.py rename to packages/google-cloud-core/tests/unit/test_exceptions.py diff --git a/packages/google-cloud-core/unit_tests/test_iterator.py b/packages/google-cloud-core/tests/unit/test_iterator.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_iterator.py rename to packages/google-cloud-core/tests/unit/test_iterator.py diff --git a/packages/google-cloud-core/unit_tests/test_operation.py b/packages/google-cloud-core/tests/unit/test_operation.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_operation.py rename to packages/google-cloud-core/tests/unit/test_operation.py diff --git a/packages/google-cloud-core/tox.ini b/packages/google-cloud-core/tox.ini deleted file mode 100644 index 156ffc07e00e..000000000000 --- a/packages/google-cloud-core/tox.ini +++ /dev/null @@ -1,31 +0,0 @@ -[tox] -envlist = - py27,py34,py35,cover - -[testing] -deps = - grpcio >= 1.0.2 - mock - pytest -covercmd = - py.test --quiet \ - --cov=google.cloud \ - --cov=unit_tests \ - --cov-config {toxinidir}/.coveragerc \ - unit_tests - -[testenv] -commands = - py.test --quiet {posargs} unit_tests -deps = - {[testing]deps} - -[testenv:cover] -basepython = - python2.7 -commands = - {[testing]covercmd} -deps = - {[testenv]deps} - coverage - pytest-cov From a44b8519d87450c508e9ac07562ae732aeb5b32b Mon Sep 17 00:00:00 2001 From: Craig Citro Date: Fri, 24 Mar 2017 14:45:25 -0700 Subject: [PATCH 122/468] Fix typo in docstring google.cloud.core._helpers (#3207) --- packages/google-cloud-core/google/cloud/_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 22ed39b24ffc..f94df65c167f 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -264,7 +264,7 @@ def _time_from_iso8601_time_naive(value): def _rfc3339_to_datetime(dt_str): - """Convert a microsecond-precision timetamp to a native datetime. + """Convert a microsecond-precision timestamp to a native datetime. :type dt_str: str :param dt_str: The string to convert. From 0d52c27174c8b459b49ff5df55d2366a4130efae Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Mar 2017 10:20:16 -0700 Subject: [PATCH 123/468] Fixing up some format strings in nox configs. 
Using `STRING_TEMPLATE % VARIABLE` can introduce hard-to-find bugs if `VARIABLE` is expected to be a string but ends up being a tuple. Instead of using percent formatting, just using `.format`. Also making tweaks to `get_target_packages` to make some path manipulation / checks OS-independent. --- packages/google-cloud-core/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index d8e068f3f00d..ba1e72f8a5f7 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -25,7 +25,7 @@ def unit_tests(session, python_version): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. - session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', From a70ba10d64ff30070f17c8386389cf68a5c96d08 Mon Sep 17 00:00:00 2001 From: andy boot Date: Thu, 30 Mar 2017 12:01:55 +0100 Subject: [PATCH 124/468] Update exceptions.py Fix typo --- packages/google-cloud-core/google/cloud/exceptions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index 01bec56d5b27..ab0ede688ef3 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -181,7 +181,7 @@ class ServiceUnavailable(ServerError): class GatewayTimeout(ServerError): - """Excepption mapping a `504 Gateway Timeout'` response.""" + """Exception mapping a `504 Gateway Timeout'` response.""" code = 504 From d388a1864a188220b4ca62627721dd992030cdad Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 Mar 2017 07:51:10 -0700 Subject: [PATCH 125/468] Make clients explicitly unpickleable. (#3230) --- packages/google-cloud-core/google/cloud/client.py | 9 +++++++++ packages/google-cloud-core/tests/unit/test_client.py | 10 ++++++++++ 2 files changed, 19 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 7fa603b77527..09dd512fdc32 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -14,6 +14,8 @@ """Base classes for client used to interact with Google Cloud APIs.""" +from pickle import PicklingError + import google.auth.credentials from google.oauth2 import service_account import google_auth_httplib2 @@ -126,6 +128,13 @@ def __init__(self, credentials=None, http=None): credentials, self.SCOPE) self._http_internal = http + def __getstate__(self): + """Explicitly state that clients are not pickleable.""" + raise PicklingError('\n'.join([ + 'Pickling client objects is explicitly not supported.', + 'Clients have non-trivial state that is local and unpickleable.', + ])) + @property def _http(self): """Getter for object used for HTTP transport. 
diff --git a/packages/google-cloud-core/tests/unit/test_client.py b/packages/google-cloud-core/tests/unit/test_client.py index d76e3d776bfe..15fd795b06e5 100644 --- a/packages/google-cloud-core/tests/unit/test_client.py +++ b/packages/google-cloud-core/tests/unit/test_client.py @@ -47,6 +47,16 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) + def test_unpickleable(self): + import pickle + + CREDENTIALS = _make_credentials() + HTTP = object() + + client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) + with self.assertRaises(pickle.PicklingError): + pickle.dumps(client_obj) + def test_ctor_defaults(self): from google.cloud._testing import _Monkey from google.cloud import client From c449800b028e6d49e8bcb1379d0597a8ec66b438 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 30 Mar 2017 08:43:22 -0700 Subject: [PATCH 126/468] Renaming http argument(s) as _http. (#3235) --- .../google-cloud-core/google/cloud/client.py | 50 ++++++++++--------- .../tests/unit/test_client.py | 8 +-- 2 files changed, 31 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 09dd512fdc32..e3f6f81326ef 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -74,12 +74,12 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): class Client(_ClientFactoryMixin): """Client to bundle configuration needed for API requests. - Stores ``credentials`` and ``http`` object so that subclasses + Stores ``credentials`` and an HTTP object so that subclasses can pass them along to a connection class. - If no value is passed in for ``http``, a :class:`httplib2.Http` object + If no value is passed in for ``_http``, a :class:`httplib2.Http` object will be created and authorized with the ``credentials``. If not, the - ``credentials`` and ``http`` need not be related. + ``credentials`` and ``_http`` need not be related. Callers and subclasses may seek to use the private key from ``credentials`` to sign data. @@ -94,21 +94,23 @@ class Client(_ClientFactoryMixin): In addition, ``redirections`` and ``connection_type`` may be used. - A custom ``http`` object will also need to be able to add a bearer token + A custom ``_http`` object will also need to be able to add a bearer token to API requests and handle token refresh on 401 errors. :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``http`` object is + client. If not passed (and if no ``_http`` object is passed), falls back to the default inferred from the environment. - :type http: :class:`~httplib2.Http` - :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an - ``http`` object is created that is bound to the - ``credentials`` for the current object. + :type _http: :class:`~httplib2.Http` + :param _http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``_http`` object is created that is bound to the + ``credentials`` for the current object. + This parameter should be considered private, and could + change in the future. 
""" SCOPE = None @@ -117,16 +119,16 @@ class Client(_ClientFactoryMixin): Needs to be set by subclasses. """ - def __init__(self, credentials=None, http=None): + def __init__(self, credentials=None, _http=None): if (credentials is not None and not isinstance( credentials, google.auth.credentials.Credentials)): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) - if credentials is None and http is None: + if credentials is None and _http is None: credentials = get_credentials() self._credentials = google.auth.credentials.with_scopes_if_required( credentials, self.SCOPE) - self._http_internal = http + self._http_internal = _http def __getstate__(self): """Explicitly state that clients are not pickleable.""" @@ -188,21 +190,23 @@ class ClientWithProject(Client, _ClientProjectMixin): :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``http`` object is + client. If not passed (and if no ``_http`` object is passed), falls back to the default inferred from the environment. - :type http: :class:`~httplib2.Http` - :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an - ``http`` object is created that is bound to the - ``credentials`` for the current object. + :type _http: :class:`~httplib2.Http` + :param _http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``_http`` object is created that is bound to the + ``credentials`` for the current object. + This parameter should be considered private, and could + change in the future. :raises: :class:`ValueError` if the project is neither passed in nor set in the environment. 
""" - def __init__(self, project=None, credentials=None, http=None): + def __init__(self, project=None, credentials=None, _http=None): _ClientProjectMixin.__init__(self, project=project) - Client.__init__(self, credentials=credentials, http=http) + Client.__init__(self, credentials=credentials, _http=_http) diff --git a/packages/google-cloud-core/tests/unit/test_client.py b/packages/google-cloud-core/tests/unit/test_client.py index 15fd795b06e5..a5ae46fb609d 100644 --- a/packages/google-cloud-core/tests/unit/test_client.py +++ b/packages/google-cloud-core/tests/unit/test_client.py @@ -78,7 +78,7 @@ def mock_get_credentials(): def test_ctor_explicit(self): CREDENTIALS = _make_credentials() HTTP = object() - client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) + client_obj = self._make_one(credentials=CREDENTIALS, _http=HTTP) self.assertIs(client_obj._credentials, CREDENTIALS) self.assertIs(client_obj._http_internal, HTTP) @@ -116,7 +116,7 @@ def test_from_service_account_json_bad_args(self): def test__http_property_existing(self): credentials = _make_credentials() http = object() - client = self._make_one(credentials=credentials, http=http) + client = self._make_one(credentials=credentials, _http=http) self.assertIs(client._http_internal, http) self.assertIs(client._http, http) @@ -196,7 +196,7 @@ def test_ctor_w_invalid_project(self): HTTP = object() with self.assertRaises(ValueError): self._make_one(project=object(), credentials=CREDENTIALS, - http=HTTP) + _http=HTTP) def _explicit_ctor_helper(self, project): import six @@ -205,7 +205,7 @@ def _explicit_ctor_helper(self, project): HTTP = object() client_obj = self._make_one(project=project, credentials=CREDENTIALS, - http=HTTP) + _http=HTTP) if isinstance(project, six.binary_type): self.assertEqual(client_obj.project, project.decode('utf-8')) From f149074b093430043721a794a1892009fe6a7b46 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 30 Mar 2017 09:27:51 -0700 Subject: [PATCH 127/468] Fixing usage of http= vs. _http= in core unit test. (#3243) Issue caused by two unrelated PRs merging in close proximity: #3235 and #3230. --- packages/google-cloud-core/tests/unit/test_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/tests/unit/test_client.py b/packages/google-cloud-core/tests/unit/test_client.py index a5ae46fb609d..21a8bccc9845 100644 --- a/packages/google-cloud-core/tests/unit/test_client.py +++ b/packages/google-cloud-core/tests/unit/test_client.py @@ -53,7 +53,7 @@ def test_unpickleable(self): CREDENTIALS = _make_credentials() HTTP = object() - client_obj = self._make_one(credentials=CREDENTIALS, http=HTTP) + client_obj = self._make_one(credentials=CREDENTIALS, _http=HTTP) with self.assertRaises(pickle.PicklingError): pickle.dumps(client_obj) From e915e0339e1a9362c433b2004a7c9311aed65040 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 Mar 2017 14:45:10 -0700 Subject: [PATCH 128/468] GA and Beta Promotions (#3245) * Make clients explicitly unpickleable. Closes #3211. * Make clients explicitly unpickleable. Closes #3211. * Add GA designator, add 1.0 version numbers. * Version changes. Eep. * Oops, Speech is still alpha. * 0.24.0, not 0.24.1 * Remove double __getstate__ goof. * Version changes. Eep. * Oops, Speech is still alpha. * Remove double __getstate__ goof. * Adding 3.6 classifier where missing and fixing bad versions. Done via "git grep '0\.24'" and "git grep '0\.23'". * Fix Noxfiles forlocal packages. 
* Fixing copy-pasta issue in error reporting nox config. Also fixing bad indent in same file. * Depend on stable logging in error reporting package. * Fixing lint errors in error_reporting. These were masked because error_reporting's lint nox session was linting the datastore codebase. This also means that the error reporting package has gained __all__. * Fixing a syntax error in nox config for logging. Also fixing an indent error while I was in there. * Revert "Add docs for 'result_index' usage and a system test." This reverts commit b5742aa160f604ec7cd81873ad24ac9aa75e548d. * Fixing docs nox session for umbrella package. Two issues: - error_reporting came BEFORE logging (which means it would try to pull in a logging dep from PyPI that doesn't exist) - dns was NOT in the list of local packages * Updating upper bound on logging in error_reporting. * Un-revert typo fix. --- packages/google-cloud-core/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 73c00bf96de4..89526136b318 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -44,6 +44,7 @@ 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Topic :: Internet', ], } @@ -60,7 +61,7 @@ setup( name='google-cloud-core', - version='0.23.1', + version='0.24.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 5b0b91b675092599dca2ac34be6862be895232f1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 22 Mar 2017 15:14:35 -0400 Subject: [PATCH 129/468] Factor common IAM policy bits into 'google.cloud.iam'. Pubsub-specific roles, permissions left behind in 'google.cloud.pubsub.iam'. 'google.cloud.pubsub.iam.Policy' subclasses the core one, extending it to deal with the pubsub-specific roles. --- .../google-cloud-core/google/cloud/iam.py | 217 ++++++++++++++++++ .../google-cloud-core/unit_tests/test_iam.py | 172 ++++++++++++++ 2 files changed, 389 insertions(+) create mode 100644 packages/google-cloud-core/google/cloud/iam.py create mode 100644 packages/google-cloud-core/unit_tests/test_iam.py diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py new file mode 100644 index 000000000000..1303b3c42625 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -0,0 +1,217 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Non-API-specific IAM policy definitions + +For allowed roles / permissions, see: +https://cloud.google.com/iam/docs/understanding-roles +""" + +# Generic IAM roles + +OWNER_ROLE = 'roles/owner' +"""Generic role implying all rights to an object.""" + +EDITOR_ROLE = 'roles/editor' +"""Generic role implying rights to modify an object.""" + +VIEWER_ROLE = 'roles/viewer' +"""Generic role implying rights to access an object.""" + + +class Policy(object): + """IAM Policy + + See: + https://cloud.google.com/iam/reference/rest/v1/Policy + + :type etag: str + :param etag: ETag used to identify a unique of the policy + + :type version: int + :param version: unique version of the policy + """ + _OWNER_ROLES = (OWNER_ROLE,) + """Roles mapped onto our ``owners`` attribute.""" + + _EDITOR_ROLES = (EDITOR_ROLE,) + """Roles mapped onto our ``editors`` attribute.""" + + _VIEWER_ROLES = (VIEWER_ROLE,) + """Roles mapped onto our ``viewers`` attribute.""" + + def __init__(self, etag=None, version=None): + self.etag = etag + self.version = version + self.owners = set() + self.editors = set() + self.viewers = set() + + @staticmethod + def user(email): + """Factory method for a user member. + + :type email: str + :param email: E-mail for this particular user. + + :rtype: str + :returns: A member string corresponding to the given user. + """ + return 'user:%s' % (email,) + + @staticmethod + def service_account(email): + """Factory method for a service account member. + + :type email: str + :param email: E-mail for this particular service account. + + :rtype: str + :returns: A member string corresponding to the given service account. + """ + return 'serviceAccount:%s' % (email,) + + @staticmethod + def group(email): + """Factory method for a group member. + + :type email: str + :param email: An id or e-mail for this particular group. + + :rtype: str + :returns: A member string corresponding to the given group. + """ + return 'group:%s' % (email,) + + @staticmethod + def domain(domain): + """Factory method for a domain member. + + :type domain: str + :param domain: The domain for this member. + + :rtype: str + :returns: A member string corresponding to the given domain. + """ + return 'domain:%s' % (domain,) + + @staticmethod + def all_users(): + """Factory method for a member representing all users. + + :rtype: str + :returns: A member string representing all users. + """ + return 'allUsers' + + @staticmethod + def authenticated_users(): + """Factory method for a member representing all authenticated users. + + :rtype: str + :returns: A member string representing all authenticated users. + """ + return 'allAuthenticatedUsers' + + def _bind_custom_role(self, role, members): # pylint: disable=no-self-use + """Bind an API-specific role to members. + + Helper for :meth:`from_api_repr`. + + :type role: str + :param role: role to bind. + + :type members: set of str + :param members: member IDs to be bound to the role. + + Subclasses may override. + """ + raise ValueError( + 'Unknown binding: role=%s, members=%s' % (role, members)) + + @classmethod + def from_api_repr(cls, resource): + """Create a policy from the resource returned from the API. + + :type resource: dict + :param resource: resource returned from the ``getIamPolicy`` API. 
+ + :rtype: :class:`Policy` + :returns: the parsed policy + """ + version = resource.get('version') + etag = resource.get('etag') + policy = cls(etag, version) + for binding in resource.get('bindings', ()): + role = binding['role'] + members = set(binding['members']) + if role in cls._OWNER_ROLES: + policy.owners |= members + elif role in cls._EDITOR_ROLES: + policy.editors |= members + elif role in cls._VIEWER_ROLES: + policy.viewers |= members + else: + policy._bind_custom_role(role, members) + return policy + + def _role_bindings(self): + """Enumerate members bound to roles for the policy. + + Helper for :meth:`to_api_repr`. + + :rtype: list of mapping + :returns: zero or more mappings describing roles / members bound by + the policy. + + Subclasses may override. + """ + bindings = [] + + if self.owners: + bindings.append( + {'role': OWNER_ROLE, + 'members': sorted(self.owners)}) + + if self.editors: + bindings.append( + {'role': EDITOR_ROLE, + 'members': sorted(self.editors)}) + + if self.viewers: + bindings.append( + {'role': VIEWER_ROLE, + 'members': sorted(self.viewers)}) + + return bindings + + def to_api_repr(self): + """Construct a Policy resource. + + :rtype: dict + :returns: a resource to be passed to the ``setIamPolicy`` API. + """ + resource = {} + + if self.etag is not None: + resource['etag'] = self.etag + + if self.version is not None: + resource['version'] = self.version + + bindings = self._role_bindings() + + if bindings: + resource['bindings'] = bindings + + return resource diff --git a/packages/google-cloud-core/unit_tests/test_iam.py b/packages/google-cloud-core/unit_tests/test_iam.py new file mode 100644 index 000000000000..d4d270ad8dc2 --- /dev/null +++ b/packages/google-cloud-core/unit_tests/test_iam.py @@ -0,0 +1,172 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +class TestPolicy(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.iam import Policy + + return Policy + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_ctor_defaults(self): + policy = self._make_one() + self.assertIsNone(policy.etag) + self.assertIsNone(policy.version) + self.assertEqual(list(policy.owners), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) + + def test_ctor_explicit(self): + VERSION = 17 + ETAG = 'ETAG' + policy = self._make_one(ETAG, VERSION) + self.assertEqual(policy.etag, ETAG) + self.assertEqual(policy.version, VERSION) + self.assertEqual(list(policy.owners), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) + + def test_user(self): + EMAIL = 'phred@example.com' + MEMBER = 'user:%s' % (EMAIL,) + policy = self._make_one() + self.assertEqual(policy.user(EMAIL), MEMBER) + + def test_service_account(self): + EMAIL = 'phred@example.com' + MEMBER = 'serviceAccount:%s' % (EMAIL,) + policy = self._make_one() + self.assertEqual(policy.service_account(EMAIL), MEMBER) + + def test_group(self): + EMAIL = 'phred@example.com' + MEMBER = 'group:%s' % (EMAIL,) + policy = self._make_one() + self.assertEqual(policy.group(EMAIL), MEMBER) + + def test_domain(self): + DOMAIN = 'example.com' + MEMBER = 'domain:%s' % (DOMAIN,) + policy = self._make_one() + self.assertEqual(policy.domain(DOMAIN), MEMBER) + + def test_all_users(self): + policy = self._make_one() + self.assertEqual(policy.all_users(), 'allUsers') + + def test_authenticated_users(self): + policy = self._make_one() + self.assertEqual(policy.authenticated_users(), 'allAuthenticatedUsers') + + def test_from_api_repr_only_etag(self): + RESOURCE = { + 'etag': 'ACAB', + } + klass = self._get_target_class() + policy = klass.from_api_repr(RESOURCE) + self.assertEqual(policy.etag, 'ACAB') + self.assertIsNone(policy.version) + self.assertEqual(list(policy.owners), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) + + def test_from_api_repr_complete(self): + from google.cloud.iam import ( + OWNER_ROLE, + EDITOR_ROLE, + VIEWER_ROLE, + ) + + OWNER1 = 'user:phred@example.com' + OWNER2 = 'group:cloud-logs@google.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + RESOURCE = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + ], + } + klass = self._get_target_class() + policy = klass.from_api_repr(RESOURCE) + self.assertEqual(policy.etag, 'DEADBEEF') + self.assertEqual(policy.version, 17) + self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) + + def test_from_api_repr_bad_role(self): + BOGUS1 = 'user:phred@example.com' + BOGUS2 = 'group:cloud-logs@google.com' + RESOURCE = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': 'nonesuch', 'members': [BOGUS1, BOGUS2]}, + ], + } + klass = self._get_target_class() + with self.assertRaises(ValueError): + klass.from_api_repr(RESOURCE) + + def test_to_api_repr_defaults(self): + policy = self._make_one() + 
self.assertEqual(policy.to_api_repr(), {}) + + def test_to_api_repr_only_etag(self): + policy = self._make_one('DEADBEEF') + self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) + + def test_to_api_repr_full(self): + from google.cloud.iam import ( + OWNER_ROLE, + EDITOR_ROLE, + VIEWER_ROLE, + ) + + OWNER1 = 'group:cloud-logs@google.com' + OWNER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + EXPECTED = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + ], + } + policy = self._make_one('DEADBEEF', 17) + policy.owners.add(OWNER1) + policy.owners.add(OWNER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) + self.assertEqual(policy.to_api_repr(), EXPECTED) From 10d965e4023270e08eee6f3c5048f2ea892ee958 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 23 Mar 2017 13:25:44 -0400 Subject: [PATCH 130/468] Accomodate (future) user-defined roles. - google.cloud.iam.Policy holds a 'bindings' mapping, which doesn't enforce using known roles. - Its 'owners', 'editors', and 'viewers' are now properties which indirect over that 'bindings' attribute. Note that this is a breaking change, as users who relied on mutating one of those sets (rather than re-assigning it) will need to update. --- .../google-cloud-core/google/cloud/iam.py | 111 ++++++++---------- .../google-cloud-core/unit_tests/test_iam.py | 62 ++++++---- 2 files changed, 85 insertions(+), 88 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 1303b3c42625..452d969e9805 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -53,9 +53,49 @@ class Policy(object): def __init__(self, etag=None, version=None): self.etag = etag self.version = version - self.owners = set() - self.editors = set() - self.viewers = set() + self.bindings = {} + + @property + def owners(self): + """Legacy access to owner role.""" + result = set() + for role in self._OWNER_ROLES: + for member in self.bindings.get(role, ()): + result.add(member) + return result + + @owners.setter + def owners(self, value): + """Update owners.""" + self.bindings[OWNER_ROLE] = list(value) + + @property + def editors(self): + """Legacy access to editor role.""" + result = set() + for role in self._EDITOR_ROLES: + for member in self.bindings.get(role, ()): + result.add(member) + return result + + @editors.setter + def editors(self, value): + """Update editors.""" + self.bindings[EDITOR_ROLE] = list(value) + + @property + def viewers(self): + """Legacy access to viewer role.""" + result = set() + for role in self._VIEWER_ROLES: + for member in self.bindings.get(role, ()): + result.add(member) + return result + + @viewers.setter + def viewers(self, value): + """Update viewers.""" + self.bindings[VIEWER_ROLE] = list(value) @staticmethod def user(email): @@ -123,22 +163,6 @@ def authenticated_users(): """ return 'allAuthenticatedUsers' - def _bind_custom_role(self, role, members): # pylint: disable=no-self-use - """Bind an API-specific role to members. - - Helper for :meth:`from_api_repr`. - - :type role: str - :param role: role to bind. 
- - :type members: set of str - :param members: member IDs to be bound to the role. - - Subclasses may override. - """ - raise ValueError( - 'Unknown binding: role=%s, members=%s' % (role, members)) - @classmethod def from_api_repr(cls, resource): """Create a policy from the resource returned from the API. @@ -154,47 +178,10 @@ def from_api_repr(cls, resource): policy = cls(etag, version) for binding in resource.get('bindings', ()): role = binding['role'] - members = set(binding['members']) - if role in cls._OWNER_ROLES: - policy.owners |= members - elif role in cls._EDITOR_ROLES: - policy.editors |= members - elif role in cls._VIEWER_ROLES: - policy.viewers |= members - else: - policy._bind_custom_role(role, members) + members = sorted(binding['members']) + policy.bindings[role] = members return policy - def _role_bindings(self): - """Enumerate members bound to roles for the policy. - - Helper for :meth:`to_api_repr`. - - :rtype: list of mapping - :returns: zero or more mappings describing roles / members bound by - the policy. - - Subclasses may override. - """ - bindings = [] - - if self.owners: - bindings.append( - {'role': OWNER_ROLE, - 'members': sorted(self.owners)}) - - if self.editors: - bindings.append( - {'role': EDITOR_ROLE, - 'members': sorted(self.editors)}) - - if self.viewers: - bindings.append( - {'role': VIEWER_ROLE, - 'members': sorted(self.viewers)}) - - return bindings - def to_api_repr(self): """Construct a Policy resource. @@ -209,9 +196,9 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - bindings = self._role_bindings() - - if bindings: - resource['bindings'] = bindings + if len(self.bindings) > 0: + resource['bindings'] = [ + {'role': role, 'members': members} + for role, members in sorted(self.bindings.items())] return resource diff --git a/packages/google-cloud-core/unit_tests/test_iam.py b/packages/google-cloud-core/unit_tests/test_iam.py index d4d270ad8dc2..c9b62ee0455b 100644 --- a/packages/google-cloud-core/unit_tests/test_iam.py +++ b/packages/google-cloud-core/unit_tests/test_iam.py @@ -33,6 +33,7 @@ def test_ctor_defaults(self): self.assertEqual(list(policy.owners), []) self.assertEqual(list(policy.editors), []) self.assertEqual(list(policy.viewers), []) + self.assertEqual(dict(policy.bindings), {}) def test_ctor_explicit(self): VERSION = 17 @@ -43,6 +44,7 @@ def test_ctor_explicit(self): self.assertEqual(list(policy.owners), []) self.assertEqual(list(policy.editors), []) self.assertEqual(list(policy.viewers), []) + self.assertEqual(dict(policy.bindings), {}) def test_user(self): EMAIL = 'phred@example.com' @@ -87,6 +89,7 @@ def test_from_api_repr_only_etag(self): self.assertEqual(list(policy.owners), []) self.assertEqual(list(policy.editors), []) self.assertEqual(list(policy.viewers), []) + self.assertEqual(dict(policy.bindings), {}) def test_from_api_repr_complete(self): from google.cloud.iam import ( @@ -95,8 +98,8 @@ def test_from_api_repr_complete(self): VIEWER_ROLE, ) - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' + OWNER1 = 'group:cloud-logs@google.com' + OWNER2 = 'user:phred@example.com' EDITOR1 = 'domain:google.com' EDITOR2 = 'user:phred@example.com' VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' @@ -114,23 +117,31 @@ def test_from_api_repr_complete(self): policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) + 
self.assertEqual(sorted(policy.owners), [OWNER1, OWNER2]) self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - - def test_from_api_repr_bad_role(self): - BOGUS1 = 'user:phred@example.com' - BOGUS2 = 'group:cloud-logs@google.com' + self.assertEqual( + dict(policy.bindings), { + OWNER_ROLE: [OWNER1, OWNER2], + EDITOR_ROLE: [EDITOR1, EDITOR2], + VIEWER_ROLE: [VIEWER1, VIEWER2], + }) + + def test_from_api_repr_unknown_role(self): + USER = 'user:phred@example.com' + GROUP = 'group:cloud-logs@google.com' RESOURCE = { 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ - {'role': 'nonesuch', 'members': [BOGUS1, BOGUS2]}, + {'role': 'unknown', 'members': [USER, GROUP]}, ], } klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_api_repr(RESOURCE) + policy = klass.from_api_repr(RESOURCE) + self.assertEqual(policy.etag, 'DEADBEEF') + self.assertEqual(policy.version, 17) + self.assertEqual(policy.bindings, {'unknown': [GROUP, USER]}) def test_to_api_repr_defaults(self): policy = self._make_one() @@ -141,6 +152,7 @@ def test_to_api_repr_only_etag(self): self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) def test_to_api_repr_full(self): + import operator from google.cloud.iam import ( OWNER_ROLE, EDITOR_ROLE, @@ -153,20 +165,18 @@ def test_to_api_repr_full(self): EDITOR2 = 'user:phred@example.com' VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' VIEWER2 = 'user:phred@example.com' - EXPECTED = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } + BINDINGS = [ + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + ] policy = self._make_one('DEADBEEF', 17) - policy.owners.add(OWNER1) - policy.owners.add(OWNER2) - policy.editors.add(EDITOR1) - policy.editors.add(EDITOR2) - policy.viewers.add(VIEWER1) - policy.viewers.add(VIEWER2) - self.assertEqual(policy.to_api_repr(), EXPECTED) + policy.owners = [OWNER1, OWNER2] + policy.editors = [EDITOR1, EDITOR2] + policy.viewers = [VIEWER1, VIEWER2] + resource = policy.to_api_repr() + self.assertEqual(resource['etag'], 'DEADBEEF') + self.assertEqual(resource['version'], 17) + key = operator.itemgetter('role') + self.assertEqual( + sorted(resource['bindings'], key=key), sorted(BINDINGS, key=key)) From e7eaa31a9cc1594aa31ed63e4faaeee6332e60b9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 23 Mar 2017 14:59:10 -0400 Subject: [PATCH 131/468] Address review: - Don't pass roles w/ empty members to back-end. - De-duplicate role members when passing to back-end. 
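A quick usage sketch of the two review points above (illustrative only, written against the `Policy` API as it stands after this patch; the role and member strings are made up): duplicate members collapse to one entry, and a role bound to no members is dropped from the serialized resource.

    from google.cloud.iam import Policy, OWNER_ROLE

    policy = Policy(etag='DEADBEEF')
    policy.bindings[OWNER_ROLE] = [
        'group:cloud-logs@google.com',
        'group:cloud-logs@google.com',   # duplicate member
    ]
    policy.bindings['roles/custom'] = []  # binding with no members

    print(policy.to_api_repr())
    # {'etag': 'DEADBEEF',
    #  'bindings': [{'role': 'roles/owner',
    #                'members': ['group:cloud-logs@google.com']}]}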
--- packages/google-cloud-core/google/cloud/iam.py | 11 ++++++++--- .../google-cloud-core/unit_tests/test_iam.py | 16 ++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 452d969e9805..e19928ff6b34 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -197,8 +197,13 @@ def to_api_repr(self): resource['version'] = self.version if len(self.bindings) > 0: - resource['bindings'] = [ - {'role': role, 'members': members} - for role, members in sorted(self.bindings.items())] + bindings = resource['bindings'] = [] + for role, members in sorted(self.bindings.items()): + if len(members) > 0: + bindings.append( + {'role': role, 'members': sorted(set(members))}) + + if len(bindings) == 0: + del resource['bindings'] return resource diff --git a/packages/google-cloud-core/unit_tests/test_iam.py b/packages/google-cloud-core/unit_tests/test_iam.py index c9b62ee0455b..be3719de464c 100644 --- a/packages/google-cloud-core/unit_tests/test_iam.py +++ b/packages/google-cloud-core/unit_tests/test_iam.py @@ -151,6 +151,22 @@ def test_to_api_repr_only_etag(self): policy = self._make_one('DEADBEEF') self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) + def test_to_api_repr_binding_wo_members(self): + policy = self._make_one() + policy.bindings['empty'] = [] + self.assertEqual(policy.to_api_repr(), {}) + + def test_to_api_repr_binding_w_duplicates(self): + from google.cloud.iam import OWNER_ROLE + + OWNER = 'group:cloud-logs@google.com' + policy = self._make_one() + policy.owners = [OWNER, OWNER] + self.assertEqual( + policy.to_api_repr(), { + 'bindings': [{'role': OWNER_ROLE, 'members': [OWNER]}], + }) + def test_to_api_repr_full(self): import operator from google.cloud.iam import ( From a88e445e7f97fdf8931f521686a5d13a9cfa982f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Apr 2017 17:42:20 -0400 Subject: [PATCH 132/468] Accomodate noxification. --- packages/google-cloud-core/{unit_tests => tests/unit}/test_iam.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename packages/google-cloud-core/{unit_tests => tests/unit}/test_iam.py (100%) diff --git a/packages/google-cloud-core/unit_tests/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py similarity index 100% rename from packages/google-cloud-core/unit_tests/test_iam.py rename to packages/google-cloud-core/tests/unit/test_iam.py From 3ffb53d63dd05411a5dcddd71de71630f623bdff Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Apr 2017 19:43:07 -0400 Subject: [PATCH 133/468] Return frozensets from named Policy properties. Updating them in place never actually worked (they were sets created on the fly), but at least we give an appropriate error now if the user tries. 
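Roughly what this means for callers, sketched against the `Policy` class as of this patch (the member strings are made up): the named attributes are read-only snapshots, so attempting to mutate them in place now fails loudly instead of silently updating a throwaway set.

    from google.cloud.iam import Policy

    policy = Policy()
    policy.owners = ['user:phred@example.com']       # assignment still works here
    print(policy.owners)                             # frozenset({'user:phred@example.com'})

    try:
        policy.owners.add('user:bharney@example.com')
    except AttributeError as exc:
        print(exc)                                   # 'frozenset' object has no attribute 'add'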
--- packages/google-cloud-core/google/cloud/iam.py | 6 +++--- packages/google-cloud-core/tests/unit/test_iam.py | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index e19928ff6b34..ce2d3e445ba0 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -62,7 +62,7 @@ def owners(self): for role in self._OWNER_ROLES: for member in self.bindings.get(role, ()): result.add(member) - return result + return frozenset(result) @owners.setter def owners(self, value): @@ -76,7 +76,7 @@ def editors(self): for role in self._EDITOR_ROLES: for member in self.bindings.get(role, ()): result.add(member) - return result + return frozenset(result) @editors.setter def editors(self, value): @@ -90,7 +90,7 @@ def viewers(self): for role in self._VIEWER_ROLES: for member in self.bindings.get(role, ()): result.add(member) - return result + return frozenset(result) @viewers.setter def viewers(self, value): diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index be3719de464c..425e3572bc34 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -30,8 +30,11 @@ def test_ctor_defaults(self): policy = self._make_one() self.assertIsNone(policy.etag) self.assertIsNone(policy.version) + self.assertIsInstance(policy.owners, frozenset) self.assertEqual(list(policy.owners), []) + self.assertIsInstance(policy.editors, frozenset) self.assertEqual(list(policy.editors), []) + self.assertIsInstance(policy.viewers, frozenset) self.assertEqual(list(policy.viewers), []) self.assertEqual(dict(policy.bindings), {}) From ff31578e401dea4ea9d7fbe0e4e90550fbeb7571 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Apr 2017 11:49:10 -0400 Subject: [PATCH 134/468] Make IAM Policy objects dict-like. Keys are roles, values are lists of principals.
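In practice the mapping interface reads like this (an illustrative sketch, not part of the patch; the role and member values are made up):

    from google.cloud.iam import Policy

    policy = Policy()
    policy['roles/owner'] = ['user:phred@example.com']   # bind members to a role
    print(list(policy))                                  # ['roles/owner']
    print(policy['roles/owner'])                         # the members bound to that role
    print('roles/owner' in policy, len(policy))          # True 1
    del policy['roles/owner']                            # drop the binding
    print(dict(policy))                                  # {}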
--- .../google-cloud-core/google/cloud/iam.py | 42 ++++++++++++++----- .../google-cloud-core/tests/unit/test_iam.py | 39 ++++++++++++++--- 2 files changed, 64 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index ce2d3e445ba0..d2987986123b 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -17,6 +17,8 @@ https://cloud.google.com/iam/docs/understanding-roles """ +import collections + # Generic IAM roles OWNER_ROLE = 'roles/owner' @@ -29,7 +31,7 @@ """Generic role implying rights to access an object.""" -class Policy(object): +class Policy(collections.MutableMapping): """IAM Policy See: @@ -53,49 +55,64 @@ class Policy(object): def __init__(self, etag=None, version=None): self.etag = etag self.version = version - self.bindings = {} + self._bindings = {} + + def __iter__(self): + return iter(self._bindings) + + def __len__(self): + return len(self._bindings) + + def __getitem__(self, key): + return self._bindings[key] + + def __setitem__(self, key, value): + self._bindings[key] = value + + def __delitem__(self, key): + del self._bindings[key] @property def owners(self): """Legacy access to owner role.""" result = set() for role in self._OWNER_ROLES: - for member in self.bindings.get(role, ()): + for member in self._bindings.get(role, ()): result.add(member) return frozenset(result) @owners.setter def owners(self, value): """Update owners.""" - self.bindings[OWNER_ROLE] = list(value) + self._bindings[OWNER_ROLE] = list(value) @property def editors(self): """Legacy access to editor role.""" result = set() for role in self._EDITOR_ROLES: - for member in self.bindings.get(role, ()): + for member in self._bindings.get(role, ()): result.add(member) return frozenset(result) @editors.setter def editors(self, value): """Update editors.""" - self.bindings[EDITOR_ROLE] = list(value) + self._bindings[EDITOR_ROLE] = list(value) @property def viewers(self): """Legacy access to viewer role.""" result = set() for role in self._VIEWER_ROLES: - for member in self.bindings.get(role, ()): + for member in self._bindings.get(role, ()): result.add(member) return frozenset(result) @viewers.setter def viewers(self, value): """Update viewers.""" - self.bindings[VIEWER_ROLE] = list(value) + self._bindings[VIEWER_ROLE] = list(value) @staticmethod def user(email): @@ -179,7 +196,7 @@ def from_api_repr(cls, resource): for binding in resource.get('bindings', ()): role = binding['role'] members = sorted(binding['members']) - policy.bindings[role] = members + policy._bindings[role] = members return policy def to_api_repr(self): @@ -196,9 +213,9 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if len(self.bindings) > 0: + if len(self._bindings) > 0: bindings = resource['bindings'] = [] - for role, members in sorted(self.bindings.items()): + for role, members in sorted(self._bindings.items()): if len(members) > 0: bindings.append( {'role': role, 'members': sorted(set(members))}) @@ -207,3 +224,6 @@ def to_api_repr(self): del resource['bindings'] return resource + + +collections.MutableMapping.register(Policy) diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index 425e3572bc34..f2b162143b4d 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -36,7 +36,8 @@ def test_ctor_defaults(self): 
self.assertEqual(list(policy.editors), []) self.assertIsInstance(policy.viewers, frozenset) self.assertEqual(list(policy.viewers), []) - self.assertEqual(dict(policy.bindings), {}) + self.assertEqual(len(policy), 0) + self.assertEqual(dict(policy), {}) def test_ctor_explicit(self): VERSION = 17 @@ -47,7 +48,33 @@ def test_ctor_explicit(self): self.assertEqual(list(policy.owners), []) self.assertEqual(list(policy.editors), []) self.assertEqual(list(policy.viewers), []) - self.assertEqual(dict(policy.bindings), {}) + self.assertEqual(len(policy), 0) + self.assertEqual(dict(policy), {}) + + def test___getitem___miss(self): + policy = self._make_one() + with self.assertRaises(KeyError): + policy['nonesuch'] + + def test___setitem__(self): + USER = 'user:phred@example.com' + policy = self._make_one() + policy['rolename'] = [USER] + self.assertEqual(policy['rolename'], [USER]) + self.assertEqual(len(policy), 1) + self.assertEqual(dict(policy), {'rolename': [USER]}) + + def test___delitem___hit(self): + policy = self._make_one() + policy._bindings['rolename'] = ['phred@example.com'] + del policy['rolename'] + self.assertEqual(len(policy), 0) + self.assertEqual(dict(policy), {}) + + def test___delitem___miss(self): + policy = self._make_one() + with self.assertRaises(KeyError): + del policy['nonesuch'] def test_user(self): EMAIL = 'phred@example.com' @@ -92,7 +119,7 @@ def test_from_api_repr_only_etag(self): self.assertEqual(list(policy.owners), []) self.assertEqual(list(policy.editors), []) self.assertEqual(list(policy.viewers), []) - self.assertEqual(dict(policy.bindings), {}) + self.assertEqual(dict(policy), {}) def test_from_api_repr_complete(self): from google.cloud.iam import ( @@ -124,7 +151,7 @@ def test_from_api_repr_complete(self): self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual( - dict(policy.bindings), { + dict(policy), { OWNER_ROLE: [OWNER1, OWNER2], EDITOR_ROLE: [EDITOR1, EDITOR2], VIEWER_ROLE: [VIEWER1, VIEWER2], @@ -144,7 +171,7 @@ def test_from_api_repr_unknown_role(self): policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) - self.assertEqual(policy.bindings, {'unknown': [GROUP, USER]}) + self.assertEqual(dict(policy), {'unknown': [GROUP, USER]}) def test_to_api_repr_defaults(self): policy = self._make_one() @@ -156,7 +183,7 @@ def test_to_api_repr_only_etag(self): def test_to_api_repr_binding_wo_members(self): policy = self._make_one() - policy.bindings['empty'] = [] + policy['empty'] = [] self.assertEqual(policy.to_api_repr(), {}) def test_to_api_repr_binding_w_duplicates(self): From 4d32b2adec00ce9b1a3aca08b5c03c63ad7111a2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Apr 2017 12:27:21 -0400 Subject: [PATCH 135/468] Deprecate assignment to legacy role attributes. 
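Sketching the caller-visible behavior after this patch (illustrative only; the member string is made up): assigning to a legacy attribute still works but now emits a `DeprecationWarning`, while the dict-style spelling stays quiet.

    import warnings

    from google.cloud.iam import Policy, OWNER_ROLE

    policy = Policy()
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        policy.owners = ['user:phred@example.com']        # deprecated spelling

    print(any(issubclass(w.category, DeprecationWarning) for w in caught))  # True

    policy[OWNER_ROLE] = ['user:phred@example.com']        # preferred spelling, no warning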
--- .../google-cloud-core/google/cloud/iam.py | 13 +++++ .../google-cloud-core/tests/unit/test_iam.py | 52 +++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index d2987986123b..7aa34fa83f67 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -18,6 +18,7 @@ """ import collections +import warnings # Generic IAM roles @@ -30,6 +31,9 @@ VIEWER_ROLE = 'roles/viewer' """Generic role implying rights to access an object.""" +_ASSIGNMENT_DEPRECATED_MSG = """\ +Assigning to '{}' is deprecated. Replace with 'policy[{}] = members.""" + class Policy(collections.MutableMapping): """IAM Policy @@ -84,6 +88,9 @@ def owners(self): @owners.setter def owners(self, value): """Update owners.""" + warnings.warn( + _ASSIGNMENT_DEPRECATED_MSG.format('owners', OWNER_ROLE), + DeprecationWarning) self._bindings[OWNER_ROLE] = list(value) @property @@ -98,6 +105,9 @@ def editors(self): @editors.setter def editors(self, value): """Update editors.""" + warnings.warn( + _ASSIGNMENT_DEPRECATED_MSG.format('editors', EDITOR_ROLE), + DeprecationWarning) self._bindings[EDITOR_ROLE] = list(value) @property @@ -112,6 +122,9 @@ def viewers(self): @viewers.setter def viewers(self, value): """Update viewers.""" + warnings.warn( + _ASSIGNMENT_DEPRECATED_MSG.format('viewers', VIEWER_ROLE), + DeprecationWarning) self._bindings[VIEWER_ROLE] = list(value) @staticmethod diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index f2b162143b4d..888f6968be3c 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -76,6 +76,58 @@ def test___delitem___miss(self): with self.assertRaises(KeyError): del policy['nonesuch'] + def test_owners_getter(self): + from google.cloud.iam import OWNER_ROLE + MEMBER = 'user:phred@example.com' + policy = self._make_one() + policy[OWNER_ROLE] = [MEMBER] + self.assertIsInstance(policy.owners, frozenset) + self.assertEqual(list(policy.owners), [MEMBER]) + + def test_owners_setter(self): + import warnings + from google.cloud.iam import OWNER_ROLE + MEMBER = 'user:phred@example.com' + policy = self._make_one() + with warnings.catch_warnings(): + policy.owners = [MEMBER] + self.assertEqual(list(policy[OWNER_ROLE]), [MEMBER]) + + def test_editors_getter(self): + from google.cloud.iam import EDITOR_ROLE + MEMBER = 'user:phred@example.com' + policy = self._make_one() + policy[EDITOR_ROLE] = [MEMBER] + self.assertIsInstance(policy.editors, frozenset) + self.assertEqual(list(policy.editors), [MEMBER]) + + def test_editors_setter(self): + import warnings + from google.cloud.iam import EDITOR_ROLE + MEMBER = 'user:phred@example.com' + policy = self._make_one() + with warnings.catch_warnings(): + policy.editors = [MEMBER] + self.assertEqual(list(policy[EDITOR_ROLE]), [MEMBER]) + + def test_viewers_getter(self): + from google.cloud.iam import VIEWER_ROLE + MEMBER = 'user:phred@example.com' + policy = self._make_one() + policy[VIEWER_ROLE] = [MEMBER] + self.assertIsInstance(policy.viewers, frozenset) + self.assertEqual(list(policy.viewers), [MEMBER]) + + def test_viewers_setter(self): + import warnings + from google.cloud.iam import VIEWER_ROLE + MEMBER = 'user:phred@example.com' + policy = self._make_one() + with warnings.catch_warnings(): + warnings.simplefilter('always') + policy.viewers = [MEMBER] + 
self.assertEqual(list(policy[VIEWER_ROLE]), [MEMBER]) + def test_user(self): EMAIL = 'phred@example.com' MEMBER = 'user:%s' % (EMAIL,) From 8f45ac865474527067be005406a1d61783d424a0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 12 Apr 2017 15:11:56 -0700 Subject: [PATCH 136/468] Coerce role-principal values to frozenset. --- packages/google-cloud-core/google/cloud/iam.py | 2 +- packages/google-cloud-core/tests/unit/test_iam.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 7aa34fa83f67..4747b39bbf07 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -71,7 +71,7 @@ def __getitem__(self, key): return self._bindings[key] def __setitem__(self, key, value): - self._bindings[key] = value + self._bindings[key] = frozenset(value) def __delitem__(self, key): del self._bindings[key] diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index 888f6968be3c..9f1bd9b3904f 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -58,11 +58,12 @@ def test___getitem___miss(self): def test___setitem__(self): USER = 'user:phred@example.com' + PRINCIPALS = frozenset([USER]) policy = self._make_one() policy['rolename'] = [USER] - self.assertEqual(policy['rolename'], [USER]) + self.assertEqual(policy['rolename'], PRINCIPALS) self.assertEqual(len(policy), 1) - self.assertEqual(dict(policy), {'rolename': [USER]}) + self.assertEqual(dict(policy), {'rolename': PRINCIPALS}) def test___delitem___hit(self): policy = self._make_one() From 8b239ed4096fa2383563538774216e90070ca359 Mon Sep 17 00:00:00 2001 From: Jacob Geiger Date: Mon, 17 Apr 2017 16:02:56 -0700 Subject: [PATCH 137/468] Add new subscription fields Also simplify a helper method to use built-in logic from google.protobuf to convert between Duration and timedelta --- packages/google-cloud-core/google/cloud/_helpers.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index f94df65c167f..2c2f08dcfb45 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -439,16 +439,9 @@ def _timedelta_to_duration_pb(timedelta_val): :rtype: :class:`google.protobuf.duration_pb2.Duration` :returns: A duration object equivalent to the time delta. """ - seconds_decimal = timedelta_val.total_seconds() - # Truncate the parts other than the integer. - seconds = int(seconds_decimal) - if seconds_decimal < 0: - signed_micros = timedelta_val.microseconds - 10**6 - else: - signed_micros = timedelta_val.microseconds - # Convert nanoseconds to microseconds. - nanos = 1000 * signed_micros - return duration_pb2.Duration(seconds=seconds, nanos=nanos) + duration_pb = duration_pb2.Duration() + duration_pb.FromTimedelta(timedelta_val) + return duration_pb def _duration_pb_to_timedelta(duration_pb): From 79652e8d638f01ff320c3d75a11d06c14d7dadb4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 19 Apr 2017 11:16:06 -0400 Subject: [PATCH 138/468] Store policy bindings as sets, not frozensets. (#3308) The legacy accessors still return frozensets, as they cannot safely be mutated in plcae. 
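A short sketch of the practical difference (illustrative only; member strings are made up): dict-style access now hands back a mutable set that can be grown in place, while the legacy read-only properties keep returning frozensets.

    from google.cloud.iam import Policy, OWNER_ROLE

    policy = Policy()
    policy[OWNER_ROLE] = ['user:phred@example.com']

    policy[OWNER_ROLE].add('user:bharney@example.com')   # in-place mutation now sticks
    print(sorted(policy[OWNER_ROLE]))
    # ['user:bharney@example.com', 'user:phred@example.com']

    print(type(policy.owners).__name__)                  # still 'frozenset'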
--- .../google-cloud-core/google/cloud/iam.py | 10 +-- .../google-cloud-core/tests/unit/test_iam.py | 64 ++++++++++--------- 2 files changed, 40 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 4747b39bbf07..653cebda1e71 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -71,7 +71,7 @@ def __getitem__(self, key): return self._bindings[key] def __setitem__(self, key, value): - self._bindings[key] = frozenset(value) + self._bindings[key] = set(value) def __delitem__(self, key): del self._bindings[key] @@ -91,7 +91,7 @@ def owners(self, value): warnings.warn( _ASSIGNMENT_DEPRECATED_MSG.format('owners', OWNER_ROLE), DeprecationWarning) - self._bindings[OWNER_ROLE] = list(value) + self[OWNER_ROLE] = value @property def editors(self): @@ -108,7 +108,7 @@ def editors(self, value): warnings.warn( _ASSIGNMENT_DEPRECATED_MSG.format('editors', EDITOR_ROLE), DeprecationWarning) - self._bindings[EDITOR_ROLE] = list(value) + self[EDITOR_ROLE] = value @property def viewers(self): @@ -125,7 +125,7 @@ def viewers(self, value): warnings.warn( _ASSIGNMENT_DEPRECATED_MSG.format('viewers', VIEWER_ROLE), DeprecationWarning) - self._bindings[VIEWER_ROLE] = list(value) + self[VIEWER_ROLE] = value @staticmethod def user(email): @@ -209,7 +209,7 @@ def from_api_repr(cls, resource): for binding in resource.get('bindings', ()): role = binding['role'] members = sorted(binding['members']) - policy._bindings[role] = members + policy[role] = members return policy def to_api_repr(self): diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index 9f1bd9b3904f..42fac7c623c1 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -27,27 +27,26 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): + empty = frozenset() policy = self._make_one() self.assertIsNone(policy.etag) self.assertIsNone(policy.version) - self.assertIsInstance(policy.owners, frozenset) - self.assertEqual(list(policy.owners), []) - self.assertIsInstance(policy.editors, frozenset) - self.assertEqual(list(policy.editors), []) - self.assertIsInstance(policy.viewers, frozenset) - self.assertEqual(list(policy.viewers), []) + self.assertEqual(policy.owners, empty) + self.assertEqual(policy.editors, empty) + self.assertEqual(policy.viewers, empty) self.assertEqual(len(policy), 0) self.assertEqual(dict(policy), {}) def test_ctor_explicit(self): VERSION = 17 ETAG = 'ETAG' + empty = frozenset() policy = self._make_one(ETAG, VERSION) self.assertEqual(policy.etag, ETAG) self.assertEqual(policy.version, VERSION) - self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.editors), []) - self.assertEqual(list(policy.viewers), []) + self.assertEqual(policy.owners, empty) + self.assertEqual(policy.editors, empty) + self.assertEqual(policy.viewers, empty) self.assertEqual(len(policy), 0) self.assertEqual(dict(policy), {}) @@ -58,7 +57,7 @@ def test___getitem___miss(self): def test___setitem__(self): USER = 'user:phred@example.com' - PRINCIPALS = frozenset([USER]) + PRINCIPALS = set([USER]) policy = self._make_one() policy['rolename'] = [USER] self.assertEqual(policy['rolename'], PRINCIPALS) @@ -80,54 +79,59 @@ def test___delitem___miss(self): def test_owners_getter(self): from google.cloud.iam import OWNER_ROLE 
MEMBER = 'user:phred@example.com' + expected = frozenset([MEMBER]) policy = self._make_one() policy[OWNER_ROLE] = [MEMBER] - self.assertIsInstance(policy.owners, frozenset) - self.assertEqual(list(policy.owners), [MEMBER]) + self.assertEqual(policy.owners, expected) def test_owners_setter(self): import warnings from google.cloud.iam import OWNER_ROLE MEMBER = 'user:phred@example.com' + expected = set([MEMBER]) policy = self._make_one() with warnings.catch_warnings(): + warnings.simplefilter('always') policy.owners = [MEMBER] - self.assertEqual(list(policy[OWNER_ROLE]), [MEMBER]) + self.assertEqual(policy[OWNER_ROLE], expected) def test_editors_getter(self): from google.cloud.iam import EDITOR_ROLE MEMBER = 'user:phred@example.com' + expected = frozenset([MEMBER]) policy = self._make_one() policy[EDITOR_ROLE] = [MEMBER] - self.assertIsInstance(policy.editors, frozenset) - self.assertEqual(list(policy.editors), [MEMBER]) + self.assertEqual(policy.editors, expected) def test_editors_setter(self): import warnings from google.cloud.iam import EDITOR_ROLE MEMBER = 'user:phred@example.com' + expected = set([MEMBER]) policy = self._make_one() with warnings.catch_warnings(): + warnings.simplefilter('always') policy.editors = [MEMBER] - self.assertEqual(list(policy[EDITOR_ROLE]), [MEMBER]) + self.assertEqual(policy[EDITOR_ROLE], expected) def test_viewers_getter(self): from google.cloud.iam import VIEWER_ROLE MEMBER = 'user:phred@example.com' + expected = frozenset([MEMBER]) policy = self._make_one() policy[VIEWER_ROLE] = [MEMBER] - self.assertIsInstance(policy.viewers, frozenset) - self.assertEqual(list(policy.viewers), [MEMBER]) + self.assertEqual(policy.viewers, expected) def test_viewers_setter(self): import warnings from google.cloud.iam import VIEWER_ROLE MEMBER = 'user:phred@example.com' + expected = set([MEMBER]) policy = self._make_one() with warnings.catch_warnings(): warnings.simplefilter('always') policy.viewers = [MEMBER] - self.assertEqual(list(policy[VIEWER_ROLE]), [MEMBER]) + self.assertEqual(policy[VIEWER_ROLE], expected) def test_user(self): EMAIL = 'phred@example.com' @@ -162,6 +166,7 @@ def test_authenticated_users(self): self.assertEqual(policy.authenticated_users(), 'allAuthenticatedUsers') def test_from_api_repr_only_etag(self): + empty = frozenset() RESOURCE = { 'etag': 'ACAB', } @@ -169,9 +174,9 @@ def test_from_api_repr_only_etag(self): policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'ACAB') self.assertIsNone(policy.version) - self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.editors), []) - self.assertEqual(list(policy.viewers), []) + self.assertEqual(policy.owners, empty) + self.assertEqual(policy.editors, empty) + self.assertEqual(policy.viewers, empty) self.assertEqual(dict(policy), {}) def test_from_api_repr_complete(self): @@ -196,18 +201,19 @@ def test_from_api_repr_complete(self): {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } + empty = frozenset() klass = self._get_target_class() policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) + self.assertEqual(policy.owners, frozenset([OWNER1, OWNER2])) + self.assertEqual(policy.editors, frozenset([EDITOR1, EDITOR2])) + self.assertEqual(policy.viewers, frozenset([VIEWER1, VIEWER2])) self.assertEqual( dict(policy), { - 
OWNER_ROLE: [OWNER1, OWNER2], - EDITOR_ROLE: [EDITOR1, EDITOR2], - VIEWER_ROLE: [VIEWER1, VIEWER2], + OWNER_ROLE: set([OWNER1, OWNER2]), + EDITOR_ROLE: set([EDITOR1, EDITOR2]), + VIEWER_ROLE: set([VIEWER1, VIEWER2]), }) def test_from_api_repr_unknown_role(self): @@ -224,7 +230,7 @@ def test_from_api_repr_unknown_role(self): policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) - self.assertEqual(dict(policy), {'unknown': [GROUP, USER]}) + self.assertEqual(dict(policy), {'unknown': set([GROUP, USER])}) def test_to_api_repr_defaults(self): policy = self._make_one() From 20eec54e86c37459127cb8f11ed30c192fb2b498 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Apr 2017 13:00:32 -0700 Subject: [PATCH 139/468] Adding check that **all** setup.py README's are valid RST. (#3318) * Adding check that **all** setup.py README's are valid RST. Follow up to #3316. Fixes #2446. * Fixing duplicate reference in Logging README. * Fixing duplicate reference in Monitoring README. --- packages/google-cloud-core/nox.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index ba1e72f8a5f7..1b9ef352e3a5 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -53,6 +53,15 @@ def lint(session): session.run('flake8', 'google/cloud/core') +@nox.session +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.interpreter = 'python3.6' + session.install('docutils', 'Pygments') + session.run( + 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + + @nox.session def cover(session): """Run the final coverage report. From a0fb786150b1f9492087dfd86dd63633de3c177e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Apr 2017 10:03:56 -0700 Subject: [PATCH 140/468] Ignore tests (rather than unit_tests) in setup.py files. (#3319) --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 89526136b318..c1c8c7ae59cd 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -68,7 +68,7 @@ 'google', 'google.cloud', ], - packages=find_packages(exclude=('unit_tests*',)), + packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) From d46cd6e439a5de3b96c90df16d74681520f5ab78 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 28 Apr 2017 11:15:27 -0700 Subject: [PATCH 141/468] Cut releases of core, error reporting, pubsub, spanner and storage. (#3340) Also updating the umbrella/uber package along the way. --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index c1c8c7ae59cd..3dfa13ef5284 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -61,7 +61,7 @@ setup( name='google-cloud-core', - version='0.24.0', + version='0.24.1', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 624ecbf5513bb7d7ede9db57f85d66e8f0da7df6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 16 May 2017 14:45:38 -0400 Subject: [PATCH 142/468] iam.Policy: return empty set on missing key. Closes #3346. 
--- packages/google-cloud-core/google/cloud/iam.py | 2 +- packages/google-cloud-core/tests/unit/test_iam.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 653cebda1e71..eefc084a5f5c 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -59,7 +59,7 @@ class Policy(collections.MutableMapping): def __init__(self, etag=None, version=None): self.etag = etag self.version = version - self._bindings = {} + self._bindings = collections.defaultdict(set) def __iter__(self): return iter(self._bindings) diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index 42fac7c623c1..d076edd6eba9 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -52,8 +52,7 @@ def test_ctor_explicit(self): def test___getitem___miss(self): policy = self._make_one() - with self.assertRaises(KeyError): - policy['nonesuch'] + self.assertEqual(policy['nonesuch'], set()) def test___setitem__(self): USER = 'user:phred@example.com' From c250ff07042d76709c6c383f792d65d049db924e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 14:36:29 -0700 Subject: [PATCH 143/468] Vision semi-GAPIC (#3373) --- packages/google-cloud-core/google/cloud/exceptions.py | 4 ++-- packages/google-cloud-core/google/cloud/iam.py | 2 +- packages/google-cloud-core/google/cloud/operation.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index ab0ede688ef3..32080de7ff50 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -14,7 +14,7 @@ """Custom exceptions for :mod:`google.cloud` package. -See: https://cloud.google.com/storage/docs/json_api/v1/status-codes +See https://cloud.google.com/storage/docs/json_api/v1/status-codes """ # Avoid the grpc and google.cloud.grpc collision. @@ -48,7 +48,7 @@ class GoogleCloudError(Exception): code = None """HTTP status code. Concrete subclasses *must* define. - See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html + See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html """ def __init__(self, message, errors=()): diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index eefc084a5f5c..49bb11266cee 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -38,7 +38,7 @@ class Policy(collections.MutableMapping): """IAM Policy - See: + See https://cloud.google.com/iam/reference/rest/v1/Policy :type etag: str diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 8bc848e7facb..4e700a553e4f 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -43,7 +43,7 @@ def _compute_type_url(klass, prefix=_GOOGLE_APIS_PREFIX): def register_type(klass, type_url=None): """Register a klass as the factory for a given type URL. 
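The intended usage, sketched (illustrative only: the key-file path is a placeholder, and `ClientWithProject` here stands in for any API-specific client that derives from it): when no `project` is passed, the factory now picks up the `project_id` recorded in the service-account JSON itself.

    from google.cloud.client import ClientWithProject

    # '/path/to/keyfile.json' is a placeholder for a real service-account key
    # file, whose JSON payload carries a 'project_id' field.
    client = ClientWithProject.from_service_account_json('/path/to/keyfile.json')
    print(client.project)      # the key file's 'project_id'

    # An explicitly passed project still takes precedence over the key file:
    client = ClientWithProject.from_service_account_json(
        '/path/to/keyfile.json', project='explicit-project-id')
    print(client.project)      # 'explicit-project-id'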
- :type klass: type + :type klass: :class:`type` :param klass: class to be used as a factory for the given type :type type_url: str From c7be5a7394172d98340327a257d94bcbc24f350f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 6 Jun 2017 10:11:59 -0700 Subject: [PATCH 144/468] Adding optional switch to capture project ID in from_service_account_json(). (#3436) Fixes #1883. --- .../google-cloud-core/google/cloud/client.py | 18 ++++- packages/google-cloud-core/nox.py | 3 +- .../tests/unit/test_client.py | 69 +++++++++++++++++-- 3 files changed, 80 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index e3f6f81326ef..e7e43faf1e45 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -14,6 +14,8 @@ """Base classes for client used to interact with Google Cloud APIs.""" +import io +import json from pickle import PicklingError import google.auth.credentials @@ -40,6 +42,8 @@ class _ClientFactoryMixin(object): This class is virtual. """ + _SET_PROJECT = False + @classmethod def from_service_account_json(cls, json_credentials_path, *args, **kwargs): """Factory to retrieve JSON credentials while creating client. @@ -58,15 +62,21 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :type kwargs: dict :param kwargs: Remaining keyword arguments to pass to constructor. - :rtype: :class:`google.cloud.pubsub.client.Client` + :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. :raises: :class:`TypeError` if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: raise TypeError('credentials must not be in keyword arguments') - credentials = service_account.Credentials.from_service_account_file( - json_credentials_path) + with io.open(json_credentials_path, 'r', encoding='utf-8') as json_fi: + credentials_info = json.load(json_fi) + credentials = service_account.Credentials.from_service_account_info( + credentials_info) + if cls._SET_PROJECT: + if 'project' not in kwargs: + kwargs['project'] = credentials_info.get('project_id') + kwargs['credentials'] = credentials return cls(*args, **kwargs) @@ -207,6 +217,8 @@ class ClientWithProject(Client, _ClientProjectMixin): set in the environment. """ + _SET_PROJECT = True # Used by from_service_account_json() + def __init__(self, project=None, credentials=None, _http=None): _ClientProjectMixin.__init__(self, project=project) Client.__init__(self, credentials=credentials, _http=_http) diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index 1b9ef352e3a5..d941d60092b8 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -33,7 +33,8 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
- session.run('py.test', '--quiet', + session.run( + 'py.test', '--quiet', '--cov=google.cloud', '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', 'tests/unit', diff --git a/packages/google-cloud-core/tests/unit/test_client.py b/packages/google-cloud-core/tests/unit/test_client.py index 21a8bccc9845..14eac68abee3 100644 --- a/packages/google-cloud-core/tests/unit/test_client.py +++ b/packages/google-cloud-core/tests/unit/test_client.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io +import json import unittest import mock @@ -90,21 +92,32 @@ def test_ctor_bad_credentials(self): self._make_one(credentials=CREDENTIALS) def test_from_service_account_json(self): - KLASS = self._get_target_class() + from google.cloud import _helpers + + klass = self._get_target_class() + # Mock both the file opening and the credentials constructor. + info = {'dummy': 'value', 'valid': 'json'} + json_fi = io.StringIO(_helpers._bytes_to_unicode(json.dumps(info))) + file_open_patch = mock.patch( + 'io.open', return_value=json_fi) constructor_patch = mock.patch( 'google.oauth2.service_account.Credentials.' - 'from_service_account_file', + 'from_service_account_info', return_value=_make_credentials()) - with constructor_patch as constructor: - client_obj = KLASS.from_service_account_json( - mock.sentinel.filename) + with file_open_patch as file_open: + with constructor_patch as constructor: + client_obj = klass.from_service_account_json( + mock.sentinel.filename) self.assertIs( client_obj._credentials, constructor.return_value) self.assertIsNone(client_obj._http_internal) - constructor.assert_called_once_with(mock.sentinel.filename) + # Check that mocks were called as expected. + file_open.assert_called_once_with( + mock.sentinel.filename, 'r', encoding='utf-8') + constructor.assert_called_once_with(info) def test_from_service_account_json_bad_args(self): KLASS = self._get_target_class() @@ -221,3 +234,47 @@ def test_ctor_explicit_bytes(self): def test_ctor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) + + def _from_service_account_json_helper(self, project=None): + from google.cloud import _helpers + + klass = self._get_target_class() + + info = {'dummy': 'value', 'valid': 'json'} + if project is None: + expected_project = 'eye-d-of-project' + else: + expected_project = project + + info['project_id'] = expected_project + # Mock both the file opening and the credentials constructor. + json_fi = io.StringIO(_helpers._bytes_to_unicode(json.dumps(info))) + file_open_patch = mock.patch( + 'io.open', return_value=json_fi) + constructor_patch = mock.patch( + 'google.oauth2.service_account.Credentials.' + 'from_service_account_info', + return_value=_make_credentials()) + + with file_open_patch as file_open: + with constructor_patch as constructor: + kwargs = {} + if project is not None: + kwargs['project'] = project + client_obj = klass.from_service_account_json( + mock.sentinel.filename, **kwargs) + + self.assertIs( + client_obj._credentials, constructor.return_value) + self.assertIsNone(client_obj._http_internal) + self.assertEqual(client_obj.project, expected_project) + # Check that mocks were called as expected. 
+ file_open.assert_called_once_with( + mock.sentinel.filename, 'r', encoding='utf-8') + constructor.assert_called_once_with(info) + + def test_from_service_account_json(self): + self._from_service_account_json_helper() + + def test_from_service_account_json_project_set(self): + self._from_service_account_json_helper(project='prah-jekt') From b3cdb189d295aa245a7ea94bfcf20ad957024718 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jun 2017 16:02:22 -0400 Subject: [PATCH 145/468] Remap new Gax conflict error code (#3443) * Add testing support for 'ALREADY_EXISTS' gRPC error code. * Cover both possible gRPC conflict error codes. Closes #3175. * Exercise conflict-on-create in systests for topic/sub/snap. --- packages/google-cloud-core/google/cloud/_testing.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index f9d2b57fda52..a544fffc5fe4 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -95,6 +95,10 @@ def _make_grpc_failed_precondition(self): from grpc import StatusCode return self._make_grpc_error(StatusCode.FAILED_PRECONDITION) + def _make_grpc_already_exists(self): + from grpc import StatusCode + return self._make_grpc_error(StatusCode.ALREADY_EXISTS) + def _make_grpc_deadline_exceeded(self): from grpc import StatusCode return self._make_grpc_error(StatusCode.DEADLINE_EXCEEDED) From 7f2f49a5b9b911ef1079c27c9a35ce91cab567a0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 21 Jun 2017 10:39:29 -0400 Subject: [PATCH 146/468] Fix broken link in the client Google Auth credentials help text (#3517) --- packages/google-cloud-core/google/cloud/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index e7e43faf1e45..9bdbf507d201 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -29,8 +29,8 @@ _GOOGLE_AUTH_CREDENTIALS_HELP = ( 'This library only supports credentials from google-auth-library-python. ' - 'See https://google-cloud-python.readthedocs.io/en/latest/' - 'google-cloud-auth.html for help on authentication with this library.' + 'See https://google-cloud-python.readthedocs.io/en/latest/core/auth.html ' + 'for help on authentication with this library.' ) From 64e37c41d24b3208e25c172187cc71e700997605 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 22 Jun 2017 12:46:11 -0700 Subject: [PATCH 147/468] Add back pylint as info-only for core (#3515) --- packages/google-cloud-core/nox.py | 15 ++++++++----- packages/google-cloud-core/pylint.config.py | 25 +++++++++++++++++++++ 2 files changed, 35 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-core/pylint.config.py diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index d941d60092b8..38268bcd2f90 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -14,8 +14,6 @@ from __future__ import absolute_import -import os - import nox @@ -43,15 +41,22 @@ def unit_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. + success_codes=range(0, 100)) @nox.session diff --git a/packages/google-cloud-core/pylint.config.py b/packages/google-cloud-core/pylint.config.py new file mode 100644 index 000000000000..d8ca7b92e85e --- /dev/null +++ b/packages/google-cloud-core/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) From bf78c5d1c5cd80dd2f8886ba67632e3195e0b417 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Jun 2017 17:28:58 -0400 Subject: [PATCH 148/468] Prep core-0.25.0 release. (#3526) --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 3dfa13ef5284..5a2f43adf464 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -61,7 +61,7 @@ setup( name='google-cloud-core', - version='0.24.1', + version='0.25.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 287d068a0a3dee395fbd45b29b1255e68c3ac006 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Jun 2017 10:32:30 -0700 Subject: [PATCH 149/468] Fix inclusion of tests in manifest.in (#3552) --- packages/google-cloud-core/MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/MANIFEST.in b/packages/google-cloud-core/MANIFEST.in index 24aa72fb370b..1fbc0d0b321e 100644 --- a/packages/google-cloud-core/MANIFEST.in +++ b/packages/google-cloud-core/MANIFEST.in @@ -1,3 +1,3 @@ include README.rst LICENSE -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ From f22bf9e63229e267cdf6b1758183fecf1d215ba1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 28 Jun 2017 14:07:25 -0700 Subject: [PATCH 150/468] Making all LICENSE headers "uniform". (#3563) --- packages/google-cloud-core/pylint.config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/pylint.config.py b/packages/google-cloud-core/pylint.config.py index d8ca7b92e85e..b618319b8b61 100644 --- a/packages/google-cloud-core/pylint.config.py +++ b/packages/google-cloud-core/pylint.config.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, From edc6e31fe83f481f719e083d0d51e6f1bef2bae4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sean=20L=C3=B6fgren?= Date: Wed, 5 Jul 2017 18:00:40 +0100 Subject: [PATCH 151/468] Update doc reference for setting up a service account (#3578) --- packages/google-cloud-core/google/cloud/credentials.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index 52cba9b22fcc..6a1bf512f7a9 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -61,7 +61,8 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): """ if not isinstance(credentials, google.auth.credentials.Signing): auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' - 'google-cloud-auth.html#setting-up-a-service-account') + 'core/auth.html?highlight=authentication#setting-up-' + 'a-service-account') raise AttributeError('you need a private key to sign credentials.' 'the credentials you are currently using %s ' 'just contains a token. see %s for more ' From 02f4ac67035c4b44e0e96e08de9e2c9685f3ecab Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Jul 2017 16:41:31 -0400 Subject: [PATCH 152/468] Shorten nox virtualenv names to avoid hashing. (#3585) --- packages/google-cloud-core/nox.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index 38268bcd2f90..c8f4a942e7a2 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -25,6 +25,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', 'grpcio >= 1.0.2') @@ -63,6 +66,10 @@ def lint(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') From cb40d1d0a7700ac06a5840c6607acbf77c196a7b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Jul 2017 10:51:40 -0700 Subject: [PATCH 153/468] Updating author_email in all setup.py. (#3598) Done via: $ git grep -l author_email | \ > xargs sed -i s/jjg+google-cloud-python@google.com/googleapis-publisher@google.com/g and manually editing `videointelligence/setup.py` and `vision/setup.py`. --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 5a2f43adf464..cd461c5f2526 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -27,7 +27,7 @@ # consolidate. 
SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', From c211446a0d2e19856308d349400b7b69b7b52093 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 17 Jul 2017 14:27:51 -0700 Subject: [PATCH 154/468] Add base future package to google.cloud (#3616) --- packages/google-cloud-core/.coveragerc | 3 + .../google/cloud/_helpers.py | 23 +++ .../google/cloud/future/__init__.py | 21 +++ .../google/cloud/future/_helpers.py | 39 ++++ .../google/cloud/future/base.py | 175 ++++++++++++++++++ .../tests/unit/future/__init__.py | 0 .../tests/unit/future/test__helpers.py | 37 ++++ .../tests/unit/future/test_base.py | 145 +++++++++++++++ .../tests/unit/test__helpers.py | 29 +++ 9 files changed, 472 insertions(+) create mode 100644 packages/google-cloud-core/google/cloud/future/__init__.py create mode 100644 packages/google-cloud-core/google/cloud/future/_helpers.py create mode 100644 packages/google-cloud-core/google/cloud/future/base.py create mode 100644 packages/google-cloud-core/tests/unit/future/__init__.py create mode 100644 packages/google-cloud-core/tests/unit/future/test__helpers.py create mode 100644 packages/google-cloud-core/tests/unit/future/test_base.py diff --git a/packages/google-cloud-core/.coveragerc b/packages/google-cloud-core/.coveragerc index 9d89b1db5666..ce75f605a508 100644 --- a/packages/google-cloud-core/.coveragerc +++ b/packages/google-cloud-core/.coveragerc @@ -13,3 +13,6 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError + raise NotImplementedError() diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 2c2f08dcfb45..72918e064507 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -379,6 +379,29 @@ def _bytes_to_unicode(value): raise ValueError('%r could not be converted to unicode' % (value,)) +def _from_any_pb(pb_type, any_pb): + """Converts an Any protobuf to the specified message type + + Args: + pb_type (type): the type of the message that any_pb stores an instance + of. + any_pb (google.protobuf.any_pb2.Any): the object to be converted. + + Returns: + pb_type: An instance of the pb_type message. + + Raises: + TypeError: if the message could not be converted. + """ + msg = pb_type() + if not any_pb.Unpack(msg): + raise TypeError( + 'Could not convert {} to {}'.format( + any_pb.__class__.__name__, pb_type.__name__)) + + return msg + + def _pb_timestamp_to_datetime(timestamp_pb): """Convert a Timestamp protobuf to a datetime object. diff --git a/packages/google-cloud-core/google/cloud/future/__init__.py b/packages/google-cloud-core/google/cloud/future/__init__.py new file mode 100644 index 000000000000..e5cf2b20ce7e --- /dev/null +++ b/packages/google-cloud-core/google/cloud/future/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Futures for dealing with asynchronous operations.""" + +from google.cloud.future.base import Future + +__all__ = [ + 'Future', +] diff --git a/packages/google-cloud-core/google/cloud/future/_helpers.py b/packages/google-cloud-core/google/cloud/future/_helpers.py new file mode 100644 index 000000000000..933d0b8b2d44 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/future/_helpers.py @@ -0,0 +1,39 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Private helpers for futures.""" + +import logging +import threading + + +_LOGGER = logging.getLogger(__name__) + + +def start_daemon_thread(*args, **kwargs): + """Starts a thread and marks it as a daemon thread.""" + thread = threading.Thread(*args, **kwargs) + thread.daemon = True + thread.start() + return thread + + +def safe_invoke_callback(callback, *args, **kwargs): + """Invoke a callback, swallowing and logging any exceptions.""" + # pylint: disable=bare-except + # We intentionally want to swallow all exceptions. + try: + return callback(*args, **kwargs) + except: + _LOGGER.exception('Error while executing Future callback.') diff --git a/packages/google-cloud-core/google/cloud/future/base.py b/packages/google-cloud-core/google/cloud/future/base.py new file mode 100644 index 000000000000..928269506b65 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/future/base.py @@ -0,0 +1,175 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstract and helper bases for Future implementations.""" + +import abc + +import six + +from google.cloud.future import _helpers + + +@six.add_metaclass(abc.ABCMeta) +class Future(object): + # pylint: disable=missing-docstring + # We inherit the interfaces here from concurrent.futures. + + """Future interface. + + This interface is based on :class:`concurrent.futures.Future`. 
+ """ + + @abc.abstractmethod + def cancel(self): + raise NotImplementedError() + + @abc.abstractmethod + def cancelled(self): + raise NotImplementedError() + + @abc.abstractmethod + def running(self): + raise NotImplementedError() + + @abc.abstractmethod + def done(self): + raise NotImplementedError() + + @abc.abstractmethod + def result(self, timeout=None): + raise NotImplementedError() + + @abc.abstractmethod + def exception(self, timeout=None): + raise NotImplementedError() + + @abc.abstractmethod + def add_done_callback(self, fn): + # pylint: disable=invalid-name + raise NotImplementedError() + + @abc.abstractmethod + def set_result(self, result): + raise NotImplementedError() + + @abc.abstractmethod + def set_exception(self, exception): + raise NotImplementedError() + + +class PollingFuture(Future): + """A Future that needs to poll some service to check its status. + + The private :meth:`_blocking_poll` method should be implemented by + subclasses. + + .. note: Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods. + """ + def __init__(self): + super(PollingFuture, self).__init__() + self._result = None + self._exception = None + self._result_set = False + """bool: Set to True when the result has been set via set_result or + set_exception.""" + self._polling_thread = None + self._done_callbacks = [] + + @abc.abstractmethod + def _blocking_poll(self, timeout=None): + """Poll and wait for the Future to be resolved. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + """ + # pylint: disable=missing-raises + raise NotImplementedError() + + def result(self, timeout=None): + """Get the result of the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + google.protobuf.Message: The Operation's result. + + Raises: + google.gax.GaxError: If the operation errors or if the timeout is + reached before the operation completes. + """ + self._blocking_poll() + + if self._exception is not None: + # pylint: disable=raising-bad-type + # Pylint doesn't recognize that this is valid in this case. + raise self._exception + + return self._result + + def exception(self, timeout=None): + """Get the exception from the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + Optional[google.gax.GaxError]: The operation's error. + """ + self._blocking_poll() + return self._exception + + def add_done_callback(self, fn): + """Add a callback to be executed when the operation is complete. + + If the operation is not already complete, this will start a helper + thread to poll for the status of the operation in the background. + + Args: + fn (Callable[Future]): The callback to execute when the operation + is complete. + """ + if self._result_set: + _helpers.safe_invoke_callback(fn, self) + return + + self._done_callbacks.append(fn) + + if self._polling_thread is None: + # The polling thread will exit on its own as soon as the operation + # is done. 
+ self._polling_thread = _helpers.start_daemon_thread( + target=self._blocking_poll) + + def _invoke_callbacks(self, *args, **kwargs): + """Invoke all done callbacks.""" + for callback in self._done_callbacks: + _helpers.safe_invoke_callback(callback, *args, **kwargs) + + def set_result(self, result): + """Set the Future's result.""" + self._result = result + self._result_set = True + self._invoke_callbacks(self) + + def set_exception(self, exception): + """Set the Future's exception.""" + self._exception = exception + self._result_set = True + self._invoke_callbacks(self) diff --git a/packages/google-cloud-core/tests/unit/future/__init__.py b/packages/google-cloud-core/tests/unit/future/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-core/tests/unit/future/test__helpers.py b/packages/google-cloud-core/tests/unit/future/test__helpers.py new file mode 100644 index 000000000000..cbca5ba4d4df --- /dev/null +++ b/packages/google-cloud-core/tests/unit/future/test__helpers.py @@ -0,0 +1,37 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from google.cloud.future import _helpers + + +@mock.patch('threading.Thread', autospec=True) +def test_start_deamon_thread(unused_thread): + deamon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target) + assert deamon_thread.daemon is True + + +def test_safe_invoke_callback(): + callback = mock.Mock(spec=['__call__'], return_value=42) + result = _helpers.safe_invoke_callback(callback, 'a', b='c') + assert result == 42 + callback.assert_called_once_with('a', b='c') + + +def test_safe_invoke_callback_exception(): + callback = mock.Mock(spec=['__call__'], side_effect=ValueError()) + result = _helpers.safe_invoke_callback(callback, 'a', b='c') + assert result is None + callback.assert_called_once_with('a', b='c') diff --git a/packages/google-cloud-core/tests/unit/future/test_base.py b/packages/google-cloud-core/tests/unit/future/test_base.py new file mode 100644 index 000000000000..f10c10b24fb4 --- /dev/null +++ b/packages/google-cloud-core/tests/unit/future/test_base.py @@ -0,0 +1,145 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + +import mock +import pytest + +from google.cloud.future import base + + +class PollingFutureImpl(base.PollingFuture): + def _blocking_poll(self, timeout=None): # pragma: NO COVER + pass + + def cancel(self): + return True + + def cancelled(self): + return False + + def done(self): + return False + + def running(self): + return True + + +def test_polling_future_constructor(): + future = PollingFutureImpl() + assert not future.done() + assert not future.cancelled() + assert future.running() + assert future.cancel() + + +def test_set_result(): + future = PollingFutureImpl() + callback = mock.Mock() + + future.set_result(1) + + assert future.result() == 1 + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_set_exception(): + future = PollingFutureImpl() + exception = ValueError('meep') + + future.set_exception(exception) + + assert future.exception() == exception + with pytest.raises(ValueError): + future.result() + + callback = mock.Mock() + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_invoke_callback_exception(): + future = PollingFutureImplWithPoll() + future.set_result(42) + + # This should not raise, despite the callback causing an exception. + callback = mock.Mock(side_effect=ValueError) + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +class PollingFutureImplWithPoll(PollingFutureImpl): + def __init__(self): + super(PollingFutureImplWithPoll, self).__init__() + self.poll_count = 0 + self.event = threading.Event() + + def _blocking_poll(self, timeout=None): + if self._result_set: + return + + self.poll_count += 1 + self.event.wait() + self.set_result(42) + + +def test_result_with_polling(): + future = PollingFutureImplWithPoll() + + future.event.set() + result = future.result() + + assert result == 42 + assert future.poll_count == 1 + # Repeated calls should not cause additional polling + assert future.result() == result + assert future.poll_count == 1 + + +def test_callback_background_thread(): + future = PollingFutureImplWithPoll() + callback = mock.Mock() + + future.add_done_callback(callback) + + assert future._polling_thread is not None + assert future.poll_count == 1 + + future.event.set() + future._polling_thread.join() + + callback.assert_called_once_with(future) + + +def test_double_callback_background_thread(): + future = PollingFutureImplWithPoll() + callback = mock.Mock() + callback2 = mock.Mock() + + future.add_done_callback(callback) + current_thread = future._polling_thread + assert current_thread is not None + + # only one polling thread should be created. 
+ future.add_done_callback(callback2) + assert future._polling_thread is current_thread + + future.event.set() + future._polling_thread.join() + + assert future.poll_count == 1 + callback.assert_called_once_with(future) + callback2.assert_called_once_with(future) diff --git a/packages/google-cloud-core/tests/unit/test__helpers.py b/packages/google-cloud-core/tests/unit/test__helpers.py index fcd47f7535bc..f7ba1b2c109f 100644 --- a/packages/google-cloud-core/tests/unit/test__helpers.py +++ b/packages/google-cloud-core/tests/unit/test__helpers.py @@ -554,6 +554,35 @@ def test_it(self): self.assertEqual(self._call_fut(timestamp), dt_stamp) +class Test__from_any_pb(unittest.TestCase): + + def _call_fut(self, pb_type, any_pb): + from google.cloud._helpers import _from_any_pb + + return _from_any_pb(pb_type, any_pb) + + def test_success(self): + from google.protobuf import any_pb2 + from google.type import date_pb2 + + in_message = date_pb2.Date(year=1990) + in_message_any = any_pb2.Any() + in_message_any.Pack(in_message) + out_message = self._call_fut(date_pb2.Date, in_message_any) + self.assertEqual(in_message, out_message) + + def test_failure(self, ): + from google.protobuf import any_pb2 + from google.type import date_pb2 + from google.type import timeofday_pb2 + + in_message = any_pb2.Any() + in_message.Pack(date_pb2.Date(year=1990)) + + with self.assertRaises(TypeError): + self._call_fut(timeofday_pb2.TimeOfDay, in_message) + + class Test__pb_timestamp_to_rfc3339(unittest.TestCase): def _call_fut(self, timestamp): From a0b84ef7dd026c8ae1403c5719a1d56709f7cc99 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 19 Jul 2017 09:41:18 -0700 Subject: [PATCH 155/468] Add operation future (#3618) --- .../google/cloud/future/base.py | 51 +++- .../google/cloud/future/operation.py | 247 ++++++++++++++++++ packages/google-cloud-core/setup.py | 1 + .../tests/unit/future/test_base.py | 30 ++- .../tests/unit/future/test_operation.py | 207 +++++++++++++++ 5 files changed, 522 insertions(+), 14 deletions(-) create mode 100644 packages/google-cloud-core/google/cloud/future/operation.py create mode 100644 packages/google-cloud-core/tests/unit/future/test_operation.py diff --git a/packages/google-cloud-core/google/cloud/future/base.py b/packages/google-cloud-core/google/cloud/future/base.py index 928269506b65..aed1dfd80e5d 100644 --- a/packages/google-cloud-core/google/cloud/future/base.py +++ b/packages/google-cloud-core/google/cloud/future/base.py @@ -15,8 +15,12 @@ """Abstract and helper bases for Future implementations.""" import abc +import concurrent.futures +import functools +import operator import six +import tenacity from google.cloud.future import _helpers @@ -72,8 +76,8 @@ def set_exception(self, exception): class PollingFuture(Future): """A Future that needs to poll some service to check its status. - The private :meth:`_blocking_poll` method should be implemented by - subclasses. + The :meth:`done` method should be implemented by subclasses. The polling + behavior will repeatedly call ``done`` until it returns True. .. note: Privacy here is intended to prevent the final class from overexposing, not to prevent subclasses from accessing methods. @@ -89,6 +93,19 @@ def __init__(self): self._done_callbacks = [] @abc.abstractmethod + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. 
+ """ + # pylint: disable=redundant-returns-doc, missing-raises-doc + raise NotImplementedError() + + def running(self): + """True if the operation is currently running.""" + return not self.done() + def _blocking_poll(self, timeout=None): """Poll and wait for the Future to be resolved. @@ -96,8 +113,32 @@ def _blocking_poll(self, timeout=None): timeout (int): How long to wait for the operation to complete. If None, wait indefinitely. """ - # pylint: disable=missing-raises - raise NotImplementedError() + if self._result_set: + return + + retry_on = tenacity.retry_if_result( + functools.partial(operator.is_not, True)) + # Use exponential backoff with jitter. + wait_on = ( + tenacity.wait_exponential(multiplier=1, max=10) + + tenacity.wait_random(0, 1)) + + if timeout is None: + retry = tenacity.retry(retry=retry_on, wait=wait_on) + else: + retry = tenacity.retry( + retry=retry_on, + wait=wait_on, + stop=tenacity.stop_after_delay(timeout)) + + try: + retry(self.done)() + except tenacity.RetryError as exc: + six.raise_from( + concurrent.futures.TimeoutError( + 'Operation did not complete within the designated ' + 'timeout.'), + exc) def result(self, timeout=None): """Get the result of the operation, blocking if necessary. @@ -113,7 +154,7 @@ def result(self, timeout=None): google.gax.GaxError: If the operation errors or if the timeout is reached before the operation completes. """ - self._blocking_poll() + self._blocking_poll(timeout=timeout) if self._exception is not None: # pylint: disable=raising-bad-type diff --git a/packages/google-cloud-core/google/cloud/future/operation.py b/packages/google-cloud-core/google/cloud/future/operation.py new file mode 100644 index 000000000000..5bbfda1a8f0b --- /dev/null +++ b/packages/google-cloud-core/google/cloud/future/operation.py @@ -0,0 +1,247 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Futures for long-running operations returned from Google Cloud APIs.""" + +import functools +import threading + +from google.longrunning import operations_pb2 +from google.protobuf import json_format +from google.rpc import code_pb2 + +from google.cloud import _helpers +from google.cloud import exceptions +from google.cloud.future import base + + +class Operation(base.PollingFuture): + """A Future for interacting with a Google API Long-Running Operation. + + Args: + operation (google.longrunning.operations_pb2.Operation): The + initial operation. + refresh (Callable[[], Operation]): A callable that returns the + latest state of the operation. + cancel (Callable[[], None]), A callable that tries to cancel + the operation. + result_type (type): The protobuf type for the operation's result. + metadata_type (type): The protobuf type for the operation's + metadata. 
+ """ + + def __init__( + self, operation, refresh, cancel, + result_type, metadata_type=None): + super(Operation, self).__init__() + self._operation = operation + self._refresh = refresh + self._cancel = cancel + self._result_type = result_type + self._metadata_type = metadata_type + self._completion_lock = threading.Lock() + # Invoke this in case the operation came back already complete. + self._set_result_from_operation() + + @property + def operation(self): + """google.longrunning.Operation: The current long-running operation.""" + return self._operation + + @property + def metadata(self): + """google.protobuf.Message: the current operation metadata.""" + if not self._operation.HasField('metadata'): + return None + + return _helpers._from_any_pb( + self._metadata_type, self._operation.metadata) + + def _set_result_from_operation(self): + """Set the result or exception from the operation if it is complete.""" + # This must be done in a lock to prevent the polling thread + # and main thread from both executing the completion logic + # at the same time. + with self._completion_lock: + # If the operation isn't complete or if the result has already been + # set, do not call set_result/set_exception again. + # Note: self._result_set is set to True in set_result and + # set_exception, in case those methods are invoked directly. + if not self._operation.done or self._result_set: + return + + if self._operation.HasField('response'): + response = _helpers._from_any_pb( + self._result_type, self._operation.response) + self.set_result(response) + elif self._operation.HasField('error'): + exception = exceptions.GoogleCloudError( + self._operation.error.message, + errors=(self._operation.error)) + self.set_exception(exception) + else: + exception = exceptions.GoogleCloudError( + 'Unexpected state: Long-running operation had neither ' + 'response nor error set.') + self.set_exception(exception) + + def _refresh_and_update(self): + """Refresh the operation and update the result if needed.""" + # If the currently cached operation is done, no need to make another + # RPC as it will not change once done. + if not self._operation.done: + self._operation = self._refresh() + self._set_result_from_operation() + + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. + """ + self._refresh_and_update() + return self._operation.done + + def cancel(self): + """Attempt to cancel the operation. + + Returns: + bool: True if the cancel RPC was made, False if the operation is + already complete. + """ + if self.done(): + return False + + self._cancel() + return True + + def cancelled(self): + """True if the operation was cancelled.""" + self._refresh_and_update() + return (self._operation.HasField('error') and + self._operation.error.code == code_pb2.CANCELLED) + + +def _refresh_http(api_request, operation_name): + """Refresh an operation using a JSON/HTTP client. + + Args: + api_request (Callable): A callable used to make an API request. This + should generally be + :meth:`google.cloud._http.Connection.api_request`. + operation_name (str): The name of the operation. + + Returns: + google.longrunning.operations_pb2.Operation: The operation. + """ + path = 'operations/{}'.format(operation_name) + api_response = api_request(method='GET', path=path) + return json_format.ParseDict( + api_response, operations_pb2.Operation()) + + +def _cancel_http(api_request, operation_name): + """Cancel an operation using a JSON/HTTP client. 
+ + Args: + api_request (Callable): A callable used to make an API request. This + should generally be + :meth:`google.cloud._http.Connection.api_request`. + operation_name (str): The name of the operation. + """ + path = 'operations/{}:cancel'.format(operation_name) + api_request(method='POST', path=path) + + +def from_http_json(operation, api_request, result_type, **kwargs): + """Create an operation future from using a HTTP/JSON client. + + This interacts with the long-running operations `service`_ (specific + to a given API) vis `HTTP/JSON`_. + + .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\ + v1beta1/operations#Operation + + Args: + operation (dict): Operation as a dictionary. + api_request (Callable): A callable used to make an API request. This + should generally be + :meth:`google.cloud._http.Connection.api_request`. + result_type (type): The protobuf result type. + kwargs: Keyword args passed into the :class:`Operation` constructor. + + Returns: + Operation: The operation future to track the given operation. + """ + operation_proto = json_format.ParseDict( + operation, operations_pb2.Operation()) + refresh = functools.partial( + _refresh_http, api_request, operation_proto.name) + cancel = functools.partial( + _cancel_http, api_request, operation_proto.name) + return Operation(operation_proto, refresh, cancel, result_type, **kwargs) + + +def _refresh_grpc(operations_stub, operation_name): + """Refresh an operation using a gRPC client. + + Args: + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The gRPC operations stub. + operation_name (str): The name of the operation. + + Returns: + google.longrunning.operations_pb2.Operation: The operation. + """ + request_pb = operations_pb2.GetOperationRequest(name=operation_name) + return operations_stub.GetOperation(request_pb) + + +def _cancel_grpc(operations_stub, operation_name): + """Cancel an operation using a gRPC client. + + Args: + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The gRPC operations stub. + operation_name (str): The name of the operation. + """ + request_pb = operations_pb2.CancelOperationRequest(name=operation_name) + operations_stub.CancelOperation(request_pb) + + +def from_grpc(operation, operations_stub, result_type, **kwargs): + """Create an operation future from using a gRPC client. + + This interacts with the long-running operations `service`_ (specific + to a given API) via gRPC. + + .. _service: https://github.com/googleapis/googleapis/blob/\ + 050400df0fdb16f63b63e9dee53819044bffc857/\ + google/longrunning/operations.proto#L38 + + Args: + operation (google.longrunning.operations_pb2.Operation): The operation. + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The operations stub. + result_type (type): The protobuf result type. + kwargs: Keyword args passed into the :class:`Operation` constructor. + + Returns: + Operation: The operation future to track the given operation. 
+ """ + refresh = functools.partial( + _refresh_grpc, operations_stub, operation.name) + cancel = functools.partial( + _cancel_grpc, operations_stub, operation.name) + return Operation(operation, refresh, cancel, result_type, **kwargs) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index cd461c5f2526..ba84f2347d18 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -57,6 +57,7 @@ 'google-auth >= 0.4.0, < 2.0.0dev', 'google-auth-httplib2', 'six', + 'tenacity >= 4.0.0, <5.0.0dev' ] setup( diff --git a/packages/google-cloud-core/tests/unit/future/test_base.py b/packages/google-cloud-core/tests/unit/future/test_base.py index f10c10b24fb4..69a0348e68d9 100644 --- a/packages/google-cloud-core/tests/unit/future/test_base.py +++ b/packages/google-cloud-core/tests/unit/future/test_base.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import concurrent.futures import threading +import time import mock import pytest @@ -21,8 +23,8 @@ class PollingFutureImpl(base.PollingFuture): - def _blocking_poll(self, timeout=None): # pragma: NO COVER - pass + def done(self): + return False def cancel(self): return True @@ -30,9 +32,6 @@ def cancel(self): def cancelled(self): return False - def done(self): - return False - def running(self): return True @@ -87,13 +86,11 @@ def __init__(self): self.poll_count = 0 self.event = threading.Event() - def _blocking_poll(self, timeout=None): - if self._result_set: - return - + def done(self): self.poll_count += 1 self.event.wait() self.set_result(42) + return True def test_result_with_polling(): @@ -109,6 +106,18 @@ def test_result_with_polling(): assert future.poll_count == 1 +class PollingFutureImplTimeout(PollingFutureImplWithPoll): + def done(self): + time.sleep(1) + return False + + +def test_result_timeout(): + future = PollingFutureImplTimeout() + with pytest.raises(concurrent.futures.TimeoutError): + future.result(timeout=1) + + def test_callback_background_thread(): future = PollingFutureImplWithPoll() callback = mock.Mock() @@ -116,6 +125,9 @@ def test_callback_background_thread(): future.add_done_callback(callback) assert future._polling_thread is not None + + # Give the thread a second to poll + time.sleep(1) assert future.poll_count == 1 future.event.set() diff --git a/packages/google-cloud-core/tests/unit/future/test_operation.py b/packages/google-cloud-core/tests/unit/future/test_operation.py new file mode 100644 index 000000000000..0e29aa687ee6 --- /dev/null +++ b/packages/google-cloud-core/tests/unit/future/test_operation.py @@ -0,0 +1,207 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import mock + +from google.cloud.future import operation +from google.longrunning import operations_pb2 +from google.protobuf import struct_pb2 +from google.rpc import code_pb2 +from google.rpc import status_pb2 + +TEST_OPERATION_NAME = 'test/operation' + + +def make_operation_proto( + name=TEST_OPERATION_NAME, metadata=None, response=None, + error=None, **kwargs): + operation_proto = operations_pb2.Operation( + name=name, **kwargs) + + if metadata is not None: + operation_proto.metadata.Pack(metadata) + + if response is not None: + operation_proto.response.Pack(response) + + if error is not None: + operation_proto.error.CopyFrom(error) + + return operation_proto + + +def make_operation_future(client_operations_responses=None): + if client_operations_responses is None: + client_operations_responses = [make_operation_proto()] + + refresh = mock.Mock( + spec=['__call__'], side_effect=client_operations_responses) + refresh.responses = client_operations_responses + cancel = mock.Mock(spec=['__call__']) + operation_future = operation.Operation( + client_operations_responses[0], + refresh, + cancel, + result_type=struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + return operation_future, refresh, cancel + + +def test_constructor(): + future, refresh, cancel = make_operation_future() + + assert future.operation == refresh.responses[0] + assert future.operation.done is False + assert future.operation.name == TEST_OPERATION_NAME + assert future.metadata is None + assert future.running() + + +def test_metadata(): + expected_metadata = struct_pb2.Struct() + future, _, _ = make_operation_future( + [make_operation_proto(metadata=expected_metadata)]) + + assert future.metadata == expected_metadata + + +def test_cancellation(): + responses = [ + make_operation_proto(), + # Second response indicates that the operation was cancelled. + make_operation_proto( + done=True, + error=status_pb2.Status(code=code_pb2.CANCELLED))] + future, _, cancel = make_operation_future(responses) + + assert future.cancel() + assert future.cancelled() + cancel.assert_called_once_with() + + # Cancelling twice should have no effect. + assert not future.cancel() + cancel.assert_called_once_with() + + +def test_result(): + expected_result = struct_pb2.Struct() + responses = [ + make_operation_proto(), + # Second operation response includes the result. + make_operation_proto(done=True, response=expected_result)] + future, _, _ = make_operation_future(responses) + + result = future.result() + + assert result == expected_result + assert future.done() + + +def test_exception(): + expected_exception = status_pb2.Status(message='meep') + responses = [ + make_operation_proto(), + # Second operation response includes the error. + make_operation_proto(done=True, error=expected_exception)] + future, _, _ = make_operation_future(responses) + + exception = future.exception() + + assert expected_exception.message in '{!r}'.format(exception) + + +def test_unexpected_result(): + responses = [ + make_operation_proto(), + # Second operation response is done, but has not error or response. 
+ make_operation_proto(done=True)] + future, _, _ = make_operation_future(responses) + + exception = future.exception() + + assert 'Unexpected state' in '{!r}'.format(exception) + + +def test__refresh_http(): + api_request = mock.Mock( + return_value={'name': TEST_OPERATION_NAME, 'done': True}) + + result = operation._refresh_http(api_request, TEST_OPERATION_NAME) + + assert result.name == TEST_OPERATION_NAME + assert result.done is True + api_request.assert_called_once_with( + method='GET', path='operations/{}'.format(TEST_OPERATION_NAME)) + + +def test__cancel_http(): + api_request = mock.Mock() + + operation._cancel_http(api_request, TEST_OPERATION_NAME) + + api_request.assert_called_once_with( + method='POST', path='operations/{}:cancel'.format(TEST_OPERATION_NAME)) + + +def test_from_http_json(): + operation_json = {'name': TEST_OPERATION_NAME, 'done': True} + api_request = mock.sentinel.api_request + + future = operation.from_http_json( + operation_json, api_request, struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + assert future._result_type == struct_pb2.Struct + assert future._metadata_type == struct_pb2.Struct + assert future.operation.name == TEST_OPERATION_NAME + assert future.done + + +def test__refresh_grpc(): + operations_stub = mock.Mock(spec=['GetOperation']) + expected_result = make_operation_proto(done=True) + operations_stub.GetOperation.return_value = expected_result + + result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME) + + assert result == expected_result + expected_request = operations_pb2.GetOperationRequest( + name=TEST_OPERATION_NAME) + operations_stub.GetOperation.assert_called_once_with(expected_request) + + +def test__cancel_grpc(): + operations_stub = mock.Mock(spec=['CancelOperation']) + + operation._cancel_grpc(operations_stub, TEST_OPERATION_NAME) + + expected_request = operations_pb2.CancelOperationRequest( + name=TEST_OPERATION_NAME) + operations_stub.CancelOperation.assert_called_once_with(expected_request) + + +def test_from_grpc(): + operation_proto = make_operation_proto(done=True) + operations_stub = mock.sentinel.operations_stub + + future = operation.from_grpc( + operation_proto, operations_stub, struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + assert future._result_type == struct_pb2.Struct + assert future._metadata_type == struct_pb2.Struct + assert future.operation.name == TEST_OPERATION_NAME + assert future.done From c3676336c4210c73ff1d40d9f83c8580f92dfdba Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 19 Jul 2017 14:58:17 -0700 Subject: [PATCH 156/468] Fixing references to "dead" docs links. (#3631) * Fixing references to "dead" docs links. Done via: $ git grep -l 'google-cloud-auth.html' | \ > xargs sed -i s/'google-cloud-auth.html'/'core\/auth.html'/g $ git grep -l 'http\:\/\/google-cloud-python.readthedocs.io' | \ > xargs sed -i s/'http\:\/\/google-cloud-python.readthedocs.io'/\ > 'https\:\/\/google-cloud-python.readthedocs.io'/g Fixes #3531. * Fixing up other docs that were moved in #3459. --- packages/google-cloud-core/README.rst | 2 +- packages/google-cloud-core/google/cloud/credentials.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index 5088505addc7..e9e7e19278ce 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -9,7 +9,7 @@ used by all of the ``google-cloud-*``. - `Documentation`_ -.. 
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/google-cloud-api.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/core/modules.html Quick Start ----------- diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index 6a1bf512f7a9..e5fe30245ea5 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -60,7 +60,7 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): signed payload. """ if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' + auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' 'core/auth.html?highlight=authentication#setting-up-' 'a-service-account') raise AttributeError('you need a private key to sign credentials.' From d09da1b2cbd24d560f616b65681185786d4a343f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 09:33:21 -0700 Subject: [PATCH 157/468] Changing all pypi.python.org links to warehouse links. (#3641) Done via $ export OLD='https\:\/\/pypi.python.org\/pypi\/' $ export NEW='https\:\/\/pypi.org\/project\/' $ git grep -l ${OLD} | xargs sed -i s/${OLD}/${NEW}/g Then manually going through and adding a trailing slash to all warehouse links. (Though I did undo changes to `docs/json/`.) --- packages/google-cloud-core/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index e9e7e19278ce..53cbd311a50e 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -19,6 +19,6 @@ Quick Start $ pip install --upgrade google-cloud-core .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-core.svg - :target: https://pypi.python.org/pypi/google-cloud-core + :target: https://pypi.org/project/google-cloud-core/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-core.svg - :target: https://pypi.python.org/pypi/google-cloud-core + :target: https://pypi.org/project/google-cloud-core/ From 740bebec9e868cfd523470aeeb646759a5f517a5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Jul 2017 16:20:38 -0700 Subject: [PATCH 158/468] Removing vendored in google.cloud.streaming. (#3654) * Removing vendored in google.cloud.streaming. * Modifying setup.cfg so pytest errors are sane. 
This is **not** to be merged, just to debug the b0rken build: https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/2515 --- .../google/cloud/streaming/__init__.py | 17 - .../google/cloud/streaming/buffered_stream.py | 106 - .../google/cloud/streaming/exceptions.py | 122 - .../google/cloud/streaming/http_wrapper.py | 396 ---- .../google/cloud/streaming/stream_slice.py | 87 - .../google/cloud/streaming/transfer.py | 1223 ---------- .../google/cloud/streaming/util.py | 74 - .../tests/unit/streaming/__init__.py | 13 - .../unit/streaming/test_buffered_stream.py | 141 -- .../tests/unit/streaming/test_exceptions.py | 105 - .../tests/unit/streaming/test_http_wrapper.py | 498 ---- .../tests/unit/streaming/test_stream_slice.py | 90 - .../tests/unit/streaming/test_transfer.py | 2035 ----------------- .../tests/unit/streaming/test_util.py | 66 - 14 files changed, 4973 deletions(-) delete mode 100644 packages/google-cloud-core/google/cloud/streaming/__init__.py delete mode 100644 packages/google-cloud-core/google/cloud/streaming/buffered_stream.py delete mode 100644 packages/google-cloud-core/google/cloud/streaming/exceptions.py delete mode 100644 packages/google-cloud-core/google/cloud/streaming/http_wrapper.py delete mode 100644 packages/google-cloud-core/google/cloud/streaming/stream_slice.py delete mode 100644 packages/google-cloud-core/google/cloud/streaming/transfer.py delete mode 100644 packages/google-cloud-core/google/cloud/streaming/util.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/__init__.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/test_buffered_stream.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/test_exceptions.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/test_http_wrapper.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/test_stream_slice.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/test_transfer.py delete mode 100644 packages/google-cloud-core/tests/unit/streaming/test_util.py diff --git a/packages/google-cloud-core/google/cloud/streaming/__init__.py b/packages/google-cloud-core/google/cloud/streaming/__init__.py deleted file mode 100644 index 44e00907cb66..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Vendored-in from google-apitools 0.4.11 - -"""Base ``google.cloud.streaming`` package.""" diff --git a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py b/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py deleted file mode 100644 index 24a52176cb66..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/buffered_stream.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
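The package being deleted here was vendored in from google-apitools 0.4.11 (see the ``__init__.py`` above), so callers that still rely on these helpers can most likely switch to the upstream copies rather than the ``google.cloud.streaming`` names. A hedged sketch of the equivalent imports; the module paths are my assumption from google-apitools and should be checked against the pinned version::

    # Assumed upstream equivalents of the vendored modules removed here
    # (verify against the google-apitools release you have pinned).
    from apitools.base.py.buffered_stream import BufferedStream
    from apitools.base.py.stream_slice import StreamSlice
    from apitools.base.py.transfer import Download, Upload
    from apitools.base.py.exceptions import HttpError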
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Small helper class to provide a small slice of a stream. - -This class reads ahead to detect if we are at the end of the stream. -""" - - -class BufferedStream(object): - """Buffers a stream, reading ahead to determine if we're at the end. - - :type stream: readable file-like object - :param stream: the stream to be buffered - - :type start: int - :param start: the starting point in the stream - - :type size: int - :param size: the size of the buffer - """ - def __init__(self, stream, start, size): - self._stream = stream - self._start_pos = start - self._buffer_pos = 0 - - if not hasattr(self._stream, 'closed') or not self._stream.closed: - self._buffered_data = self._stream.read(size) - else: - self._buffered_data = b'' - - self._stream_at_end = len(self._buffered_data) < size - self._end_pos = self._start_pos + len(self._buffered_data) - - def __repr__(self): - return ('Buffered stream %s from position %s-%s with %s ' - 'bytes remaining' % (self._stream, self._start_pos, - self._end_pos, self._bytes_remaining)) - - def __len__(self): - return len(self._buffered_data) - - @property - def stream_exhausted(self): - """Does the stream have bytes remaining beyond the buffer - - :rtype: bool - :returns: Boolean indicating if the stream is exhausted. - """ - return self._stream_at_end - - @property - def stream_end_position(self): - """Point to which stream was read into the buffer - - :rtype: int - :returns: The end-position of the stream. - """ - return self._end_pos - - @property - def _bytes_remaining(self): - """Bytes remaining to be read from the buffer - - :rtype: int - :returns: The number of bytes remaining. - """ - return len(self._buffered_data) - self._buffer_pos - - def read(self, size=None): - """Read bytes from the buffer. - - :type size: int - :param size: - (Optional) How many bytes to read (defaults to all remaining - bytes). - - :rtype: str - :returns: The data read from the stream. - """ - if size is None or size < 0: - raise ValueError( - 'Illegal read of size %s requested on BufferedStream. ' - 'Wrapped stream %s is at position %s-%s, ' - '%s bytes remaining.' % - (size, self._stream, self._start_pos, self._end_pos, - self._bytes_remaining)) - - if not self._bytes_remaining: - return b'' - - size = min(size, self._bytes_remaining) - data = self._buffered_data[self._buffer_pos:self._buffer_pos + size] - self._buffer_pos += size - return data diff --git a/packages/google-cloud-core/google/cloud/streaming/exceptions.py b/packages/google-cloud-core/google/cloud/streaming/exceptions.py deleted file mode 100644 index cfeb8f8fa41f..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/exceptions.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
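For reference, the read-ahead behaviour of the ``BufferedStream`` class deleted above looks like this in use. A minimal sketch, runnable only against a release that still ships ``google.cloud.streaming``, with a made-up byte payload::

    import io

    from google.cloud.streaming.buffered_stream import BufferedStream

    stream = io.BytesIO(b'0123456789')
    buffered = BufferedStream(stream, 0, 4)   # buffer the first 4 bytes

    assert len(buffered) == 4                 # size of the buffered window
    assert not buffered.stream_exhausted      # bytes remain past the buffer
    assert buffered.read(2) == b'01'
    assert buffered.read(10) == b'23'         # capped at the remaining buffer
    assert buffered.read(10) == b''           # window drained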
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Exceptions for generated client libraries.""" - - -class Error(Exception): - """Base class for all exceptions.""" - - -class CommunicationError(Error): - """Any communication error talking to an API server.""" - - -class HttpError(CommunicationError): - """Error making a request. Soon to be HttpError. - - :type response: dict - :param response: headers from the response which returned the error - - :type content: bytes - :param content: payload of the response which returned the error - - :type url: str - :param url: URL of the response which returned the error - """ - def __init__(self, response, content, url): - super(HttpError, self).__init__() - self.response = response - self.content = content - self.url = url - - def __str__(self): - content = self.content.decode('ascii', 'replace') - return 'HttpError accessing <%s>: response: <%s>, content <%s>' % ( - self.url, self.response, content) - - @property - def status_code(self): - """Status code for the response. - - :rtype: int - :returns: the code - """ - return int(self.response['status']) - - @classmethod - def from_response(cls, http_response): - """Factory: construct an exception from a response. - - :type http_response: :class:`~.http_wrapper.Response` - :param http_response: the response which returned the error - - :rtype: :class:`HttpError` - :returns: The error created from the response. - """ - return cls(http_response.info, http_response.content, - http_response.request_url) - - -class TransferError(CommunicationError): - """Errors related to transfers.""" - - -class TransferRetryError(TransferError): - """Retryable errors related to transfers.""" - - -class TransferInvalidError(TransferError): - """The given transfer is invalid.""" - - -class RequestError(CommunicationError): - """The request was not successful.""" - - -class RetryAfterError(HttpError): - """The response contained a retry-after header. - - :type response: dict - :param response: headers from the response which returned the error. - - :type content: bytes - :param content: payload of the response which returned the error. - - :type url: str - :param url: URL of the response which returned the error. - - :type retry_after: int - :param retry_after: seconds to wait before retrying. - """ - def __init__(self, response, content, url, retry_after): - super(RetryAfterError, self).__init__(response, content, url) - self.retry_after = int(retry_after) - - @classmethod - def from_response(cls, http_response): - """Factory: construct an exception from a response. - - :type http_response: :class:`~.http_wrapper.Response` - :param http_response: the response which returned the error. - - :rtype: :class:`RetryAfterError` - :returns: The error created from the response. 
- """ - return cls(http_response.info, http_response.content, - http_response.request_url, http_response.retry_after) - - -class BadStatusCodeError(HttpError): - """The request completed but returned a bad status code.""" diff --git a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py b/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py deleted file mode 100644 index e80e105175e7..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/http_wrapper.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""HTTP wrapper for apitools. - -This library wraps the underlying http library we use, which is -currently :mod:`httplib2`. -""" - -import collections -import contextlib -import logging -import socket -import time - -import httplib2 -import six -from six.moves import http_client -from six.moves.urllib import parse - -from google.cloud.streaming.exceptions import BadStatusCodeError -from google.cloud.streaming.exceptions import RequestError -from google.cloud.streaming.exceptions import RetryAfterError -from google.cloud.streaming.util import calculate_wait_for_retry - - -_REDIRECTIONS = 5 -# 308 and 429 don't have names in httplib. -RESUME_INCOMPLETE = 308 -TOO_MANY_REQUESTS = 429 - - -_REDIRECT_STATUS_CODES = ( - http_client.MOVED_PERMANENTLY, - http_client.FOUND, - http_client.SEE_OTHER, - http_client.TEMPORARY_REDIRECT, - RESUME_INCOMPLETE, -) - - -_RETRYABLE_EXCEPTIONS = ( - http_client.BadStatusLine, - http_client.IncompleteRead, - http_client.ResponseNotReady, - socket.error, - httplib2.ServerNotFoundError, - ValueError, - RequestError, - BadStatusCodeError, - RetryAfterError, -) - - -@contextlib.contextmanager -def _httplib2_debug_level(http_request, level, http=None): - """Temporarily change the value of httplib2.debuglevel, if necessary. - - If http_request has a `loggable_body` distinct from `body`, then we - need to prevent httplib2 from logging the full body. This sets - httplib2.debuglevel for the duration of the `with` block; however, - that alone won't change the value of existing HTTP connections. If - an httplib2.Http object is provided, we'll also change the level on - any cached connections attached to it. - - :type http_request: :class:`Request` - :param http_request: the request to be logged. - - :type level: int - :param level: the debuglevel for logging. - - :type http: :class:`httplib2.Http` - :param http: - (Optional) the instance on whose connections to set the debuglevel. - """ - if http_request.loggable_body is None: - yield - return - old_level = httplib2.debuglevel - http_levels = {} - httplib2.debuglevel = level - if http is not None and getattr(http, 'connections', None) is not None: - for connection_key, connection in http.connections.items(): - # httplib2 stores two kinds of values in this dict, connection - # classes and instances. 
Since the connection types are all - # old-style classes, we can't easily distinguish by connection - # type -- so instead we use the key pattern. - if ':' not in connection_key: - continue - http_levels[connection_key] = connection.debuglevel - connection.set_debuglevel(level) - yield - httplib2.debuglevel = old_level - if http is not None: - for connection_key, old_level in http_levels.items(): - http.connections[connection_key].set_debuglevel(old_level) - - -class Request(object): - """Encapsulates the data for an HTTP request. - - :type url: str - :param url: the URL for the request - - :type http_method: str - :param http_method: the HTTP method to use for the request - - :type headers: mapping - :param headers: (Optional) headers to be sent with the request - - :type body: str - :param body: body to be sent with the request - """ - def __init__(self, url='', http_method='GET', headers=None, body=''): - self.url = url - self.http_method = http_method - self.headers = headers or {} - self._body = None - self._loggable_body = None - self.body = body - - @property - def loggable_body(self): - """Request body for logging purposes - - :rtype: str - :returns: The body to be logged. - """ - return self._loggable_body - - @loggable_body.setter - def loggable_body(self, value): - """Update request body for logging purposes - - :type value: str - :param value: updated body - - :raises: :exc:`RequestError` if the request does not have a body. - """ - if self.body is None: - raise RequestError( - 'Cannot set loggable body on request with no body') - self._loggable_body = value - - @property - def body(self): - """Request body - - :rtype: str - :returns: The body of the request. - """ - return self._body - - @body.setter - def body(self, value): - """Update the request body - - Handles logging and length measurement. - - :type value: str - :param value: updated body - """ - self._body = value - if value is not None: - # Avoid calling len() which cannot exceed 4GiB in 32-bit python. - body_length = getattr( - self._body, 'length', None) or len(self._body) - self.headers['content-length'] = str(body_length) - else: - self.headers.pop('content-length', None) - # This line ensures we don't try to print large requests. - if not isinstance(value, (type(None), six.string_types)): - self.loggable_body = '' - - -def _process_content_range(content_range): - """Convert a 'Content-Range' header into a length for the response. - - Helper for :meth:`Response.length`. - - :type content_range: str - :param content_range: the header value being parsed. - - :rtype: int - :returns: the length of the response chunk. - """ - _, _, range_spec = content_range.partition(' ') - byte_range, _, _ = range_spec.partition('/') - start, _, end = byte_range.partition('-') - return int(end) - int(start) + 1 - - -# Note: currently the order of fields here is important, since we want -# to be able to pass in the result from httplib2.request. -_ResponseTuple = collections.namedtuple( - 'HttpResponse', ['info', 'content', 'request_url']) - - -class Response(_ResponseTuple): - """Encapsulates data for an HTTP response. - """ - __slots__ = () - - def __len__(self): - return self.length - - @property - def length(self): - """Length of this response. - - Exposed as an attribute since using ``len()`` directly can fail - for responses larger than ``sys.maxint``. - - :rtype: int or long - :returns: The length of the response. 
- """ - if 'content-encoding' in self.info and 'content-range' in self.info: - # httplib2 rewrites content-length in the case of a compressed - # transfer; we can't trust the content-length header in that - # case, but we *can* trust content-range, if it's present. - return _process_content_range(self.info['content-range']) - elif 'content-length' in self.info: - return int(self.info.get('content-length')) - elif 'content-range' in self.info: - return _process_content_range(self.info['content-range']) - return len(self.content) - - @property - def status_code(self): - """HTTP status code - - :rtype: int - :returns: The response status code. - """ - return int(self.info['status']) - - @property - def retry_after(self): - """Retry interval (if set). - - :rtype: int - :returns: interval in seconds - """ - if 'retry-after' in self.info: - return int(self.info['retry-after']) - - @property - def is_redirect(self): - """Does this response contain a redirect - - :rtype: bool - :returns: True if the status code indicates a redirect and the - 'location' header is present. - """ - return (self.status_code in _REDIRECT_STATUS_CODES and - 'location' in self.info) - - -def _check_response(response): - """Validate a response - - :type response: :class:`Response` - :param response: the response to validate - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if response - is None, :exc:`~.exceptions.BadStatusCodeError` if response status - code indicates an error, or :exc:`~.exceptions.RetryAfterError` - if response indicates a retry interval. - """ - if response is None: - # Caller shouldn't call us if the response is None, but handle anyway. - raise RequestError( - 'Request did not return a response.') - elif (response.status_code >= 500 or - response.status_code == TOO_MANY_REQUESTS): - raise BadStatusCodeError.from_response(response) - elif response.retry_after: - raise RetryAfterError.from_response(response) - - -def _reset_http_connections(http): - """Rebuild all http connections in the httplib2.Http instance. - - httplib2 overloads the map in http.connections to contain two different - types of values: - { scheme string: connection class } and - { scheme + authority string : actual http connection } - Here we remove all of the entries for actual connections so that on the - next request httplib2 will rebuild them from the connection types. - - :type http: :class:`httplib2.Http` - :param http: the instance whose connections are to be rebuilt - """ - if getattr(http, 'connections', None): - for conn_key in list(http.connections.keys()): - if ':' in conn_key: - del http.connections[conn_key] - - -def _make_api_request_no_retry(http, http_request, redirections=_REDIRECTIONS): - """Send an HTTP request via the given http instance. - - This wrapper exists to handle translation between the plain httplib2 - request/response types and the Request and Response types above. - - :type http: :class:`httplib2.Http` - :param http: an instance which impelements the `Http` API. - - :type http_request: :class:`Request` - :param http_request: the request to send. - - :type redirections: int - :param redirections: Number of redirects to follow. - - :rtype: :class:`Response` - :returns: an object representing the server's response - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no - response could be parsed. - """ - connection_type = None - # Handle overrides for connection types. 
This is used if the caller - # wants control over the underlying connection for managing callbacks - # or hash digestion. - if getattr(http, 'connections', None): - url_scheme = parse.urlsplit(http_request.url).scheme - if url_scheme and url_scheme in http.connections: - connection_type = http.connections[url_scheme] - - # Custom printing only at debuglevel 4 - new_debuglevel = 4 if httplib2.debuglevel == 4 else 0 - with _httplib2_debug_level(http_request, new_debuglevel, http=http): - info, content = http.request( - str(http_request.url), method=str(http_request.http_method), - body=http_request.body, headers=http_request.headers, - redirections=redirections, connection_type=connection_type) - - if info is None: - raise RequestError() - - response = Response(info, content, http_request.url) - _check_response(response) - return response - - -def make_api_request(http, http_request, retries=7, - redirections=_REDIRECTIONS): - """Send an HTTP request via the given http, performing error/retry handling. - - :type http: :class:`httplib2.Http` - :param http: an instance which implements the `Http` API. - - :type http_request: :class:`Request` - :param http_request: the request to send. - - :type retries: int - :param retries: Number of retries to attempt on retryable - responses (such as 429 or 5XX). - - :type redirections: int - :param redirections: Number of redirects to follow. - - :rtype: :class:`Response` - :returns: an object representing the server's response. - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no - response could be parsed. - """ - retry = 0 - while True: - try: - return _make_api_request_no_retry(http, http_request, - redirections=redirections) - except _RETRYABLE_EXCEPTIONS as exc: - retry += 1 - if retry >= retries: - raise - retry_after = getattr(exc, 'retry_after', None) - if retry_after is None: - retry_after = calculate_wait_for_retry(retry) - - _reset_http_connections(http) - logging.debug('Retrying request to url %s after exception %s', - http_request.url, type(exc).__name__) - time.sleep(retry_after) diff --git a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py b/packages/google-cloud-core/google/cloud/streaming/stream_slice.py deleted file mode 100644 index 3a13337bb993..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/stream_slice.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Small helper class to provide a small slice of a stream.""" - -from six.moves import http_client - - -class StreamSlice(object): - """Provides a slice-like object for streams. - - :type stream: readable file-like object - :param stream: the stream to be buffered. - - :type max_bytes: int - :param max_bytes: maximum number of bytes to return in the slice. 
- """ - def __init__(self, stream, max_bytes): - self._stream = stream - self._remaining_bytes = max_bytes - self._max_bytes = max_bytes - - def __repr__(self): - return 'Slice of stream %s with %s/%s bytes not yet read' % ( - self._stream, self._remaining_bytes, self._max_bytes) - - def __len__(self): - return self._max_bytes - - def __nonzero__(self): - # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid - # accidental len() calls from httplib in the form of "if this_object:". - return bool(self._max_bytes) - - @property - def length(self): - """Maximum number of bytes to return in the slice. - - .. note:: - - For 32-bit python2.x, len() cannot exceed a 32-bit number. - - :rtype: int - :returns: The max "length" of the stream. - """ - return self._max_bytes - - def read(self, size=None): - """Read bytes from the slice. - - Compared to other streams, there is one case where we may - unexpectedly raise an exception on read: if the underlying stream - is exhausted (i.e. returns no bytes on read), and the size of this - slice indicates we should still be able to read more bytes, we - raise :exc:`IncompleteRead`. - - :type size: int - :param size: - (Optional) If provided, read no more than size bytes from the - stream. - - :rtype: bytes - :returns: bytes read from this slice. - - :raises: :exc:`IncompleteRead` - """ - if size is not None: - read_size = min(size, self._remaining_bytes) - else: - read_size = self._remaining_bytes - data = self._stream.read(read_size) - if read_size > 0 and not data: - raise http_client.IncompleteRead( - self._max_bytes - self._remaining_bytes, self._max_bytes) - self._remaining_bytes -= len(data) - return data diff --git a/packages/google-cloud-core/google/cloud/streaming/transfer.py b/packages/google-cloud-core/google/cloud/streaming/transfer.py deleted file mode 100644 index 3d6d5b8e6016..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/transfer.py +++ /dev/null @@ -1,1223 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
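The ``StreamSlice`` helper deleted above is easiest to follow from a short example. Again a sketch against a release that still bundles ``google.cloud.streaming``, with made-up payloads::

    import io

    from six.moves import http_client
    from google.cloud.streaming.stream_slice import StreamSlice

    chunk = StreamSlice(io.BytesIO(b'abcdefgh'), 4)
    assert len(chunk) == 4
    assert chunk.read(3) == b'abc'
    assert chunk.read() == b'd'        # the one byte left in the slice

    # An underlying stream that dries up early surfaces as IncompleteRead.
    empty = StreamSlice(io.BytesIO(b''), 2)
    try:
        empty.read()
    except http_client.IncompleteRead:
        pass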
-# pylint: disable=too-many-lines - -"""Upload and download support for apitools.""" - -import email.generator as email_generator -import email.mime.multipart as mime_multipart -import email.mime.nonmultipart as mime_nonmultipart -import mimetypes -import os - -import httplib2 -import six -from six.moves import http_client - -from google.cloud._helpers import _to_bytes -from google.cloud.streaming.buffered_stream import BufferedStream -from google.cloud.streaming.exceptions import CommunicationError -from google.cloud.streaming.exceptions import HttpError -from google.cloud.streaming.exceptions import TransferInvalidError -from google.cloud.streaming.exceptions import TransferRetryError -from google.cloud.streaming.http_wrapper import make_api_request -from google.cloud.streaming.http_wrapper import Request -from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE -from google.cloud.streaming.stream_slice import StreamSlice -from google.cloud.streaming.util import acceptable_mime_type - - -RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 -SIMPLE_UPLOAD = 'simple' -RESUMABLE_UPLOAD = 'resumable' - - -_DEFAULT_CHUNKSIZE = 1 << 20 - - -class _Transfer(object): - """Generic bits common to Uploads and Downloads. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type close_stream: bool - :param close_stream: should this instance close the stream when deleted - - :type chunksize: int - :param chunksize: the size of chunks used to download/upload a file. - - :type auto_transfer: bool - :param auto_transfer: should this instance automatically begin transfering - data when initialized - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance used to perform requests. - - :type num_retries: int - :param num_retries: how many retries should the transfer attempt - """ - - _num_retries = None - - def __init__(self, stream, close_stream=False, - chunksize=_DEFAULT_CHUNKSIZE, auto_transfer=True, - http=None, num_retries=5): - self._bytes_http = None - self._close_stream = close_stream - self._http = http - self._stream = stream - self._url = None - - # Let the @property do validation. - self.num_retries = num_retries - - self.auto_transfer = auto_transfer - self.chunksize = chunksize - - def __repr__(self): - return str(self) - - @property - def close_stream(self): - """Should this instance close the stream when deleted. - - :rtype: bool - :returns: Boolean indicated if the stream should be closed. - """ - return self._close_stream - - @property - def http(self): - """Http instance used to perform requests. - - :rtype: :class:`httplib2.Http` (or workalike) - :returns: The HTTP object used for requests. - """ - return self._http - - @property - def bytes_http(self): - """Http instance used to perform binary requests. - - Defaults to :attr:`http`. - - :rtype: :class:`httplib2.Http` (or workalike) - :returns: The HTTP object used for binary requests. - """ - return self._bytes_http or self.http - - @bytes_http.setter - def bytes_http(self, value): - """Update Http instance used to perform binary requests. - - :type value: :class:`httplib2.Http` (or workalike) - :param value: new instance - """ - self._bytes_http = value - - @property - def num_retries(self): - """How many retries should the transfer attempt - - :rtype: int - :returns: The number of retries allowed. 
- """ - return self._num_retries - - @num_retries.setter - def num_retries(self, value): - """Update how many retries should the transfer attempt - - :type value: int - """ - if not isinstance(value, six.integer_types): - raise ValueError("num_retries: pass an integer") - - if value < 0: - raise ValueError( - 'Cannot have negative value for num_retries') - self._num_retries = value - - @property - def stream(self): - """Stream to/from which data is downloaded/uploaded. - - :rtype: file-like object - :returns: The stream that sends/receives data. - """ - return self._stream - - @property - def url(self): - """URL to / from which data is downloaded/uploaded. - - :rtype: str - :returns: The URL where data is sent/received. - """ - return self._url - - def _initialize(self, http, url): - """Initialize this download by setting :attr:`http` and :attr`url`. - - Allow the user to be able to pre-initialize :attr:`http` by setting - the value in the constructor; in that case, we ignore the provided - http. - - :type http: :class:`httplib2.Http` (or a worklike) or None. - :param http: the Http instance to use to make requests. - - :type url: str - :param url: The url for this transfer. - """ - self._ensure_uninitialized() - if self.http is None: - self._http = http or httplib2.Http() - self._url = url - - @property - def initialized(self): - """Has the instance been initialized - - :rtype: bool - :returns: Boolean indicating if the current transfer - has been initialized. - """ - return self.url is not None and self.http is not None - - def _ensure_initialized(self): - """Helper: assert that the instance is initialized. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - if the instance is not initialized. - """ - if not self.initialized: - raise TransferInvalidError( - 'Cannot use uninitialized %s', type(self).__name__) - - def _ensure_uninitialized(self): - """Helper: assert that the instance is not initialized. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - if the instance is already initialized. - """ - if self.initialized: - raise TransferInvalidError( - 'Cannot re-initialize %s', type(self).__name__) - - def __del__(self): - if self._close_stream: - self._stream.close() - - -class Download(_Transfer): - """Represent a single download. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type kwds: dict - :param kwds: keyword arguments: all except ``total_size`` are passed - through to :meth:`_Transfer.__init__()`. - """ - _ACCEPTABLE_STATUSES = set(( - http_client.OK, - http_client.NO_CONTENT, - http_client.PARTIAL_CONTENT, - http_client.REQUESTED_RANGE_NOT_SATISFIABLE, - )) - - def __init__(self, stream, **kwds): - total_size = kwds.pop('total_size', None) - super(Download, self).__init__(stream, **kwds) - self._initial_response = None - self._progress = 0 - self._total_size = total_size - self._encoding = None - - @classmethod - def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): - """Create a new download object from a filename. - - :type filename: str - :param filename: path/filename for the target file - - :type overwrite: bool - :param overwrite: should an existing file be overwritten - - :type auto_transfer: bool - :param auto_transfer: should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. 
- - :rtype: :class:`Download` - :returns: The download initiated from the file passed. - """ - path = os.path.expanduser(filename) - if os.path.exists(path) and not overwrite: - raise ValueError( - 'File %s exists and overwrite not specified' % path) - return cls(open(path, 'wb'), close_stream=True, - auto_transfer=auto_transfer, **kwds) - - @classmethod - def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): - """Create a new Download object from a stream. - - :type stream: writable file-like object - :param stream: the target file - - :type total_size: int - :param total_size: (Optional) total size of the file to be downloaded - - :type auto_transfer: bool - :param auto_transfer: should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Download` - :returns: The download initiated from the stream passed. - """ - return cls(stream, auto_transfer=auto_transfer, total_size=total_size, - **kwds) - - @property - def progress(self): - """Number of bytes have been downloaded. - - :rtype: int >= 0 - :returns: The number of downloaded bytes. - """ - return self._progress - - @property - def total_size(self): - """Total number of bytes to be downloaded. - - :rtype: int or None - :returns: The total number of bytes to download. - """ - return self._total_size - - @property - def encoding(self): - """'Content-Encoding' used to transfer the file - - :rtype: str or None - :returns: The encoding of the downloaded content. - """ - return self._encoding - - def __repr__(self): - if not self.initialized: - return 'Download (uninitialized)' - else: - return 'Download with %d/%s bytes transferred from url %s' % ( - self.progress, self.total_size, self.url) - - def configure_request(self, http_request, url_builder): - """Update http_request/url_builder with download-appropriate values. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be updated - - :type url_builder: instance with settable 'query_params' attribute. - :param url_builder: transfer policy object to be updated - """ - url_builder.query_params['alt'] = 'media' - http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,) - - def _set_total(self, info): - """Update 'total_size' based on data from a response. - - :type info: mapping - :param info: response headers - """ - if 'content-range' in info: - _, _, total = info['content-range'].rpartition('/') - if total != '*': - self._total_size = int(total) - # Note "total_size is None" means we don't know it; if no size - # info was returned on our initial range request, that means we - # have a 0-byte file. (That last statement has been verified - # empirically, but is not clearly documented anywhere.) - if self.total_size is None: - self._total_size = 0 - - def initialize_download(self, http_request, http): - """Initialize this download. - - If the instance has :attr:`auto_transfer` enabled, begins the - download immediately. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to use to initialize this download. - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance for this request. 
- """ - self._ensure_uninitialized() - url = http_request.url - if self.auto_transfer: - end_byte = self._compute_end_byte(0) - self._set_range_header(http_request, 0, end_byte) - response = make_api_request( - self.bytes_http or http, http_request) - if response.status_code not in self._ACCEPTABLE_STATUSES: - raise HttpError.from_response(response) - self._initial_response = response - self._set_total(response.info) - url = response.info.get('content-location', response.request_url) - self._initialize(http, url) - # Unless the user has requested otherwise, we want to just - # go ahead and pump the bytes now. - if self.auto_transfer: - self.stream_file(use_chunks=True, headers=http_request.headers) - - def _normalize_start_end(self, start, end=None): - """Validate / fix up byte range. - - :type start: int - :param start: start byte of the range: if negative, used as an - offset from the end. - - :type end: int - :param end: end byte of the range. - - :rtype: tuple, (start, end) - :returns: the normalized start, end pair. - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - for invalid combinations of start, end. - """ - if end is not None: - if start < 0: - raise TransferInvalidError( - 'Cannot have end index with negative start index') - elif start >= self.total_size: - raise TransferInvalidError( - 'Cannot have start index greater than total size') - end = min(end, self.total_size - 1) - if end < start: - raise TransferInvalidError( - 'Range requested with end[%s] < start[%s]' % (end, start)) - return start, end - else: - if start < 0: - start = max(0, start + self.total_size) - return start, self.total_size - 1 - - @staticmethod - def _set_range_header(request, start, end=None): - """Update the 'Range' header in a request to match a byte range. - - :type request: :class:`google.cloud.streaming.http_wrapper.Request` - :param request: the request to update - - :type start: int - :param start: start byte of the range: if negative, used as an - offset from the end. - - :type end: int - :param end: end byte of the range. - """ - if start < 0: - request.headers['range'] = 'bytes=%d' % start - elif end is None: - request.headers['range'] = 'bytes=%d-' % start - else: - request.headers['range'] = 'bytes=%d-%d' % (start, end) - - def _compute_end_byte(self, start, end=None, use_chunks=True): - """Compute the last byte to fetch for this request. - - Based on the HTTP spec for Range and Content-Range. - - .. note:: - This is potentially confusing in several ways: - - the value for the last byte is 0-based, eg "fetch 10 bytes - from the beginning" would return 9 here. - - if we have no information about size, and don't want to - use the chunksize, we'll return None. - - :type start: int - :param start: start byte of the range. - - :type end: int - :param end: (Optional) suggested last byte of the range. - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize`. - - :rtype: str - :returns: Last byte to use in a 'Range' header, or None. - """ - end_byte = end - - if start < 0 and not self.total_size: - return end_byte - - if use_chunks: - alternate = start + self.chunksize - 1 - if end_byte is not None: - end_byte = min(end_byte, alternate) - else: - end_byte = alternate - - if self.total_size: - alternate = self.total_size - 1 - if end_byte is not None: - end_byte = min(end_byte, alternate) - else: - end_byte = alternate - - return end_byte - - def _get_chunk(self, start, end, headers=None): - """Retrieve a chunk of the file. 
- - :type start: int - :param start: start byte of the range. - - :type end: int - :param end: (Optional) end byte of the range. - - :type headers: dict - :param headers: (Optional) Headers to be used for the ``Request``. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: response from the chunk request. - """ - self._ensure_initialized() - request = Request(url=self.url, headers=headers) - self._set_range_header(request, start, end=end) - return make_api_request( - self.bytes_http, request, retries=self.num_retries) - - def _process_response(self, response): - """Update attribtes and writing stream, based on response. - - :type response: :class:`google.cloud.streaming.http_wrapper.Response` - :param response: response from a download request. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: the response - :raises: :exc:`google.cloud.streaming.exceptions.HttpError` for - missing / unauthorized responses; - :exc:`google.cloud.streaming.exceptions.TransferRetryError` - for other error responses. - """ - if response.status_code not in self._ACCEPTABLE_STATUSES: - # We distinguish errors that mean we made a mistake in setting - # up the transfer versus something we should attempt again. - if response.status_code in (http_client.FORBIDDEN, - http_client.NOT_FOUND): - raise HttpError.from_response(response) - else: - raise TransferRetryError(response.content) - if response.status_code in (http_client.OK, - http_client.PARTIAL_CONTENT): - self.stream.write(response.content) - self._progress += response.length - if response.info and 'content-encoding' in response.info: - self._encoding = response.info['content-encoding'] - elif response.status_code == http_client.NO_CONTENT: - # It's important to write something to the stream for the case - # of a 0-byte download to a file, as otherwise python won't - # create the file. - self.stream.write('') - return response - - def get_range(self, start, end=None, use_chunks=True): - """Retrieve a given byte range from this download, inclusive. - - Writes retrieved bytes into :attr:`stream`. - - Range must be of one of these three forms: - * 0 <= start, end = None: Fetch from start to the end of the file. - * 0 <= start <= end: Fetch the bytes from start to end. - * start < 0, end = None: Fetch the last -start bytes of the file. - - (These variations correspond to those described in the HTTP 1.1 - protocol for range headers in RFC 2616, sec. 14.35.1.) - - :type start: int - :param start: Where to start fetching bytes. (See above.) - - :type end: int - :param end: (Optional) Where to stop fetching bytes. (See above.) - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize` - and fetch this range in a single request. - If True, streams via chunks. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferRetryError` - if a request returns an empty response. 
- """ - self._ensure_initialized() - progress_end_normalized = False - if self.total_size is not None: - progress, end_byte = self._normalize_start_end(start, end) - progress_end_normalized = True - else: - progress = start - end_byte = end - while (not progress_end_normalized or end_byte is None or - progress <= end_byte): - end_byte = self._compute_end_byte(progress, end=end_byte, - use_chunks=use_chunks) - response = self._get_chunk(progress, end_byte) - if not progress_end_normalized: - self._set_total(response.info) - progress, end_byte = self._normalize_start_end(start, end) - progress_end_normalized = True - response = self._process_response(response) - progress += response.length - if response.length == 0: - raise TransferRetryError( - 'Zero bytes unexpectedly returned in download response') - - def stream_file(self, use_chunks=True, headers=None): - """Stream the entire download. - - Writes retrieved bytes into :attr:`stream`. - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize` - and stream this download in a single request. - If True, streams via chunks. - - :type headers: dict - :param headers: (Optional) Headers to be used for the ``Request``. - """ - self._ensure_initialized() - while True: - if self._initial_response is not None: - response = self._initial_response - self._initial_response = None - else: - end_byte = self._compute_end_byte(self.progress, - use_chunks=use_chunks) - response = self._get_chunk(self.progress, end_byte, - headers=headers) - if self.total_size is None: - self._set_total(response.info) - response = self._process_response(response) - if (response.status_code == http_client.OK or - self.progress >= self.total_size): - break - - -class Upload(_Transfer): - """Represent a single Upload. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type mime_type: str: - :param mime_type: MIME type of the upload. - - :type total_size: int - :param total_size: (Optional) Total upload size for the stream. - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance used to perform requests. - - :type close_stream: bool - :param close_stream: should this instance close the stream when deleted - - :type auto_transfer: bool - :param auto_transfer: should this instance automatically begin transfering - data when initialized - - :type kwds: dict - :param kwds: keyword arguments: all except ``total_size`` are passed - through to :meth:`_Transfer.__init__()`. - """ - _REQUIRED_SERIALIZATION_KEYS = set(( - 'auto_transfer', 'mime_type', 'total_size', 'url')) - - def __init__(self, stream, mime_type, total_size=None, http=None, - close_stream=False, auto_transfer=True, - **kwds): - super(Upload, self).__init__( - stream, close_stream=close_stream, auto_transfer=auto_transfer, - http=http, **kwds) - self._final_response = None - self._server_chunk_granularity = None - self._complete = False - self._mime_type = mime_type - self._progress = 0 - self._strategy = None - self._total_size = total_size - - @classmethod - def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): - """Create a new Upload object from a filename. 
- - :type filename: str - :param filename: path/filename to the file being uploaded - - :type mime_type: str - :param mime_type: MIMEtype of the file being uploaded - - :type auto_transfer: bool - :param auto_transfer: - (Optional) should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Upload` - :returns: The upload initiated from the file passed. - """ - path = os.path.expanduser(filename) - if not mime_type: - mime_type, _ = mimetypes.guess_type(path) - if mime_type is None: - raise ValueError( - 'Could not guess mime type for %s' % path) - size = os.stat(path).st_size - return cls(open(path, 'rb'), mime_type, total_size=size, - close_stream=True, auto_transfer=auto_transfer, **kwds) - - @classmethod - def from_stream(cls, stream, mime_type, - total_size=None, auto_transfer=True, **kwds): - """Create a new Upload object from a stream. - - :type stream: writable file-like object - :param stream: the target file - - :type mime_type: str - :param mime_type: MIMEtype of the file being uploaded - - :type total_size: int - :param total_size: (Optional) Size of the file being uploaded - - :type auto_transfer: bool - :param auto_transfer: - (Optional) should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Upload` - :returns: The upload initiated from the stream passed. - """ - if mime_type is None: - raise ValueError( - 'No mime_type specified for stream') - return cls(stream, mime_type, total_size=total_size, - close_stream=False, auto_transfer=auto_transfer, **kwds) - - @property - def complete(self): - """Has the entire stream been uploaded. - - :rtype: bool - :returns: Boolean indicated if the upload is complete. - """ - return self._complete - - @property - def mime_type(self): - """MIMEtype of the file being uploaded. - - :rtype: str - :returns: The mime-type of the upload. - """ - return self._mime_type - - @property - def progress(self): - """Bytes uploaded so far - - :rtype: int - :returns: The amount uploaded so far. - """ - return self._progress - - @property - def strategy(self): - """Upload strategy to use - - :rtype: str or None - :returns: The strategy used to upload the data. - """ - return self._strategy - - @strategy.setter - def strategy(self, value): - """Update upload strategy to use - - :type value: str (one of :data:`SIMPLE_UPLOAD` or - :data:`RESUMABLE_UPLOAD`) - - :raises: :exc:`ValueError` if value is not one of the two allowed - strings. - """ - if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD): - raise ValueError(( - 'Invalid value "%s" for upload strategy, must be one of ' - '"simple" or "resumable".') % value) - self._strategy = value - - @property - def total_size(self): - """Total size of the stream to be uploaded. - - :rtype: int or None - :returns: The total size to be uploaded. - """ - return self._total_size - - @total_size.setter - def total_size(self, value): - """Update total size of the stream to be uploaded. 
- - :type value: int - :param value: (Optional) the size - """ - self._ensure_uninitialized() - self._total_size = value - - def __repr__(self): - if not self.initialized: - return 'Upload (uninitialized)' - else: - return 'Upload with %d/%s bytes transferred for url %s' % ( - self.progress, self.total_size or '???', self.url) - - def _set_default_strategy(self, upload_config, http_request): - """Determine and set the default upload strategy for this upload. - - We generally prefer simple or multipart, unless we're forced to - use resumable. This happens when any of (1) the upload is too - large, (2) the simple endpoint doesn't support multipart requests - and we have metadata, or (3) there is no simple upload endpoint. - - :type upload_config: instance w/ ``max_size`` and ``accept`` - attributes - :param upload_config: Configuration for the upload endpoint. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: The associated http request. - """ - if upload_config.resumable_path is None: - self.strategy = SIMPLE_UPLOAD - if self.strategy is not None: - return - strategy = SIMPLE_UPLOAD - if (self.total_size is not None and - self.total_size > RESUMABLE_UPLOAD_THRESHOLD): - strategy = RESUMABLE_UPLOAD - if http_request.body and not upload_config.simple_multipart: - strategy = RESUMABLE_UPLOAD - if not upload_config.simple_path: - strategy = RESUMABLE_UPLOAD - self.strategy = strategy - - def configure_request(self, upload_config, http_request, url_builder): - """Configure the request and url for this upload. - - :type upload_config: instance w/ ``max_size`` and ``accept`` - attributes - :param upload_config: transfer policy object to be queried - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be updated - - :type url_builder: instance with settable 'relative_path' and - 'query_params' attributes. - :param url_builder: transfer policy object to be updated - - :raises: :exc:`ValueError` if the requested upload is too big, - or does not have an acceptable MIME type. - """ - # Validate total_size vs. max_size - if (self.total_size and upload_config.max_size and - self.total_size > upload_config.max_size): - raise ValueError( - 'Upload too big: %s larger than max size %s' % ( - self.total_size, upload_config.max_size)) - # Validate mime type - if not acceptable_mime_type(upload_config.accept, self.mime_type): - raise ValueError( - 'MIME type %s does not match any accepted MIME ranges %s' % ( - self.mime_type, upload_config.accept)) - - self._set_default_strategy(upload_config, http_request) - if self.strategy == SIMPLE_UPLOAD: - url_builder.relative_path = upload_config.simple_path - if http_request.body: - url_builder.query_params['uploadType'] = 'multipart' - self._configure_multipart_request(http_request) - else: - url_builder.query_params['uploadType'] = 'media' - self._configure_media_request(http_request) - else: - url_builder.relative_path = upload_config.resumable_path - url_builder.query_params['uploadType'] = 'resumable' - self._configure_resumable_request(http_request) - - def _configure_media_request(self, http_request): - """Helper for 'configure_request': set up simple request.""" - http_request.headers['content-type'] = self.mime_type - http_request.body = self.stream.read() - http_request.loggable_body = '' - - def _configure_multipart_request(self, http_request): - """Helper for 'configure_request': set up multipart request.""" - # This is a multipart/related upload. 
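        # What follows builds the multipart/related payload: the request
        # metadata becomes the first MIME part, the media bytes read from
        # self.stream become the second, and the boundary generated by the
        # multipart container is copied back into the 'content-type' header
        # (a redacted copy of the body is kept for logging purposes).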
- msg_root = mime_multipart.MIMEMultipart('related') - # msg_root should not write out its own headers - setattr(msg_root, '_write_headers', lambda self: None) - - # attach the body as one part - msg = mime_nonmultipart.MIMENonMultipart( - *http_request.headers['content-type'].split('/')) - msg.set_payload(http_request.body) - msg_root.attach(msg) - - # attach the media as the second part - msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/')) - msg['Content-Transfer-Encoding'] = 'binary' - msg.set_payload(self.stream.read()) - msg_root.attach(msg) - - # NOTE: generate multipart message as bytes, not text - stream = six.BytesIO() - if six.PY3: # pragma: NO COVER Python3 - generator_class = email_generator.BytesGenerator - else: - generator_class = email_generator.Generator - generator = generator_class(stream, mangle_from_=False) - generator.flatten(msg_root, unixfrom=False) - http_request.body = stream.getvalue() - - multipart_boundary = msg_root.get_boundary() - http_request.headers['content-type'] = ( - 'multipart/related; boundary="%s"' % multipart_boundary) - - boundary_bytes = _to_bytes(multipart_boundary) - body_components = http_request.body.split(boundary_bytes) - headers, _, _ = body_components[-2].partition(b'\n\n') - body_components[-2] = b'\n\n'.join([headers, b'\n\n--']) - http_request.loggable_body = boundary_bytes.join(body_components) - - def _configure_resumable_request(self, http_request): - """Helper for 'configure_request': set up resumable request.""" - http_request.headers['X-Upload-Content-Type'] = self.mime_type - if self.total_size is not None: - http_request.headers[ - 'X-Upload-Content-Length'] = str(self.total_size) - - def refresh_upload_state(self): - """Refresh the state of a resumable upload via query to the back-end. - """ - if self.strategy != RESUMABLE_UPLOAD: - return - self._ensure_initialized() - # NOTE: Per RFC 2616[1]/7231[2], a 'PUT' request is inappropriate - # here: it is intended to be used to replace the entire - # resource, not to query for a status. - # - # If the back-end doesn't provide a way to query for this state - # via a 'GET' request, somebody should be spanked. - # - # The violation is documented[3]. - # - # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6 - # [2] http://tools.ietf.org/html/rfc7231#section-4.3.4 - # [3] - # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload - refresh_request = Request( - url=self.url, http_method='PUT', - headers={'Content-Range': 'bytes */*'}) - refresh_response = make_api_request( - self.http, refresh_request, redirections=0, - retries=self.num_retries) - range_header = self._get_range_header(refresh_response) - if refresh_response.status_code in (http_client.OK, - http_client.CREATED): - self._complete = True - self._progress = self.total_size - self.stream.seek(self.progress) - # If we're finished, the refresh response will contain the metadata - # originally requested. Cache it so it can be returned in - # StreamInChunks. - self._final_response = refresh_response - elif refresh_response.status_code == RESUME_INCOMPLETE: - if range_header is None: - self._progress = 0 - else: - self._progress = self._last_byte(range_header) + 1 - self.stream.seek(self.progress) - else: - raise HttpError.from_response(refresh_response) - - @staticmethod - def _get_range_header(response): - """Return a 'Range' header from a response. 
- - :type response: :class:`google.cloud.streaming.http_wrapper.Response` - :param response: response to be queried - - :rtype: str - :returns: The header used to determine the bytes range. - """ - # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header, - # not a response header. If the back-end is actually setting - # 'Range' on responses, somebody should be spanked: it should - # be sending 'Content-Range' (including the # '/' - # trailer). - # - # The violation is documented[4]. - # - # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html - # [2] http://tools.ietf.org/html/rfc7233#section-3.1 - # [3] http://tools.ietf.org/html/rfc7233#section-4.2 - # [4] - # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#chunking - return response.info.get('Range', response.info.get('range')) - - def initialize_upload(self, http_request, http): - """Initialize this upload from the given http_request. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be used - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance for this request. - - :raises: :exc:`ValueError` if the instance has not been configured - with a strategy. - :rtype: :class:`~google.cloud.streaming.http_wrapper.Response` - :returns: The response if the upload is resumable and auto transfer - is not used. - """ - if self.strategy is None: - raise ValueError( - 'No upload strategy set; did you call configure_request?') - if self.strategy != RESUMABLE_UPLOAD: - return - self._ensure_uninitialized() - http_response = make_api_request(http, http_request, - retries=self.num_retries) - if http_response.status_code != http_client.OK: - raise HttpError.from_response(http_response) - - granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity') - if granularity is not None: - granularity = int(granularity) - self._server_chunk_granularity = granularity - url = http_response.info['location'] - self._initialize(http, url) - - # Unless the user has requested otherwise, we want to just - # go ahead and pump the bytes now. - if self.auto_transfer: - return self.stream_file(use_chunks=True) - else: - return http_response - - @staticmethod - def _last_byte(range_header): - """Parse the last byte from a 'Range' header. - - :type range_header: str - :param range_header: 'Range' header value per RFC 2616/7233 - - :rtype: int - :returns: The last byte from a range header. - """ - _, _, end = range_header.partition('-') - return int(end) - - def _validate_chunksize(self, chunksize=None): - """Validate chunksize against server-specified granularity. - - Helper for :meth:`stream_file`. - - :type chunksize: int - :param chunksize: (Optional) the chunk size to be tested. - - :raises: :exc:`ValueError` if ``chunksize`` is not a multiple - of the server-specified granulariy. - """ - if self._server_chunk_granularity is None: - return - chunksize = chunksize or self.chunksize - if chunksize % self._server_chunk_granularity: - raise ValueError( - 'Server requires chunksize to be a multiple of %d', - self._server_chunk_granularity) - - def stream_file(self, use_chunks=True): - """Upload the stream. - - :type use_chunks: bool - :param use_chunks: If False, send the stream in a single request. - Otherwise, send it in chunks. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response for the final request made. 
- """ - if self.strategy != RESUMABLE_UPLOAD: - raise ValueError( - 'Cannot stream non-resumable upload') - # final_response is set if we resumed an already-completed upload. - response = self._final_response - send_func = self._send_chunk if use_chunks else self._send_media_body - if use_chunks: - self._validate_chunksize(self.chunksize) - self._ensure_initialized() - while not self.complete: - response = send_func(self.stream.tell()) - if response.status_code in (http_client.OK, http_client.CREATED): - self._complete = True - break - self._progress = self._last_byte(response.info['range']) - if self.progress + 1 != self.stream.tell(): - raise CommunicationError( - 'Failed to transfer all bytes in chunk, upload paused at ' - 'byte %d' % self.progress) - if self.complete and hasattr(self.stream, 'seek'): - if not hasattr(self.stream, 'seekable') or self.stream.seekable(): - current_pos = self.stream.tell() - self.stream.seek(0, os.SEEK_END) - end_pos = self.stream.tell() - self.stream.seek(current_pos) - if current_pos != end_pos: - raise TransferInvalidError( - 'Upload complete with %s ' - 'additional bytes left in stream' % - (int(end_pos) - int(current_pos))) - return response - - def _send_media_request(self, request, end): - """Peform API upload request. - - Helper for _send_media_body & _send_chunk: - - :type request: :class:`google.cloud.streaming.http_wrapper.Request` - :param request: the request to upload - - :type end: int - :param end: end byte of the to be uploaded - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: the response - :raises: :exc:`~.streaming.exceptions.HttpError` if the status - code from the response indicates an error. - """ - response = make_api_request( - self.bytes_http, request, retries=self.num_retries) - if response.status_code not in (http_client.OK, http_client.CREATED, - RESUME_INCOMPLETE): - # We want to reset our state to wherever the server left us - # before this failed request, and then raise. - self.refresh_upload_state() - raise HttpError.from_response(response) - if response.status_code == RESUME_INCOMPLETE: - last_byte = self._last_byte( - self._get_range_header(response)) - if last_byte + 1 != end: - self.stream.seek(last_byte) - return response - - def _send_media_body(self, start): - """Send the entire stream in a single request. - - Helper for :meth:`stream_file`: - - :type start: int - :param start: start byte of the range. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response from the media upload request. - """ - self._ensure_initialized() - if self.total_size is None: - raise TransferInvalidError( - 'Total size must be known for SendMediaBody') - body_stream = StreamSlice(self.stream, self.total_size - start) - - request = Request(url=self.url, http_method='PUT', body=body_stream) - request.headers['Content-Type'] = self.mime_type - if start == self.total_size: - # End of an upload with 0 bytes left to send; just finalize. - range_string = 'bytes */%s' % self.total_size - else: - range_string = 'bytes %s-%s/%s' % (start, self.total_size - 1, - self.total_size) - - request.headers['Content-Range'] = range_string - - return self._send_media_request(request, self.total_size) - - def _send_chunk(self, start): - """Send a chunk of the stream. - - Helper for :meth:`stream_file`: - - :type start: int - :param start: start byte of the range. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response from the chunked upload request. 
- """ - self._ensure_initialized() - no_log_body = self.total_size is None - if self.total_size is None: - # For the streaming resumable case, we need to detect when - # we're at the end of the stream. - body_stream = BufferedStream( - self.stream, start, self.chunksize) - end = body_stream.stream_end_position - if body_stream.stream_exhausted: - self._total_size = end - # Here, change body_stream from a stream to a string object, - # which means reading a chunk into memory. This works around - # https://code.google.com/p/httplib2/issues/detail?id=176 which can - # cause httplib2 to skip bytes on 401's for file objects. - body_stream = body_stream.read(self.chunksize) - else: - end = min(start + self.chunksize, self.total_size) - body_stream = StreamSlice(self.stream, end - start) - request = Request(url=self.url, http_method='PUT', body=body_stream) - request.headers['Content-Type'] = self.mime_type - if no_log_body: - # Disable logging of streaming body. - request.loggable_body = '' - if self.total_size is None: - # Streaming resumable upload case, unknown total size. - range_string = 'bytes %s-%s/*' % (start, end - 1) - elif end == start: - # End of an upload with 0 bytes left to send; just finalize. - range_string = 'bytes */%s' % self.total_size - else: - # Normal resumable upload case with known sizes. - range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size) - - request.headers['Content-Range'] = range_string - - return self._send_media_request(request, end) diff --git a/packages/google-cloud-core/google/cloud/streaming/util.py b/packages/google-cloud-core/google/cloud/streaming/util.py deleted file mode 100644 index e896052f8a1c..000000000000 --- a/packages/google-cloud-core/google/cloud/streaming/util.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Assorted utilities shared between parts of apitools.""" - -import random - - -_MAX_RETRY_WAIT = 60 - - -def calculate_wait_for_retry(retry_attempt): - """Calculate the amount of time to wait before a retry attempt. - - Wait time grows exponentially with the number of attempts. A - random amount of jitter is added to spread out retry attempts from - different clients. - - :type retry_attempt: int - :param retry_attempt: Retry attempt counter. - - :rtype: int - :returns: Number of seconds to wait before retrying request. - """ - wait_time = 2 ** retry_attempt - max_jitter = wait_time / 4.0 - wait_time += random.uniform(-max_jitter, max_jitter) - return max(1, min(wait_time, _MAX_RETRY_WAIT)) - - -def acceptable_mime_type(accept_patterns, mime_type): - """Check that ``mime_type`` matches one of ``accept_patterns``. - - Note that this function assumes that all patterns in accept_patterns - will be simple types of the form "type/subtype", where one or both - of these can be "*". We do not support parameters (i.e. "; q=") in - patterns. - - :type accept_patterns: list of string - :param accept_patterns: acceptable MIME types. 
- - :type mime_type: str - :param mime_type: the MIME being checked - - :rtype: bool - :returns: True if the supplied MIME type matches at least one of the - patterns, else False. - """ - if '/' not in mime_type: - raise ValueError( - 'Invalid MIME type: "%s"' % mime_type) - unsupported_patterns = [p for p in accept_patterns if ';' in p] - if unsupported_patterns: - raise ValueError( - 'MIME patterns with parameter unsupported: "%s"' % ', '.join( - unsupported_patterns)) - - def _match(pattern, mime_type): - """Return True iff mime_type is acceptable for pattern.""" - return all(accept in ('*', provided) for accept, provided - in zip(pattern.split('/'), mime_type.split('/'))) - - return any(_match(pattern, mime_type) for pattern in accept_patterns) diff --git a/packages/google-cloud-core/tests/unit/streaming/__init__.py b/packages/google-cloud-core/tests/unit/streaming/__init__.py deleted file mode 100644 index 58e0d9153632..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/packages/google-cloud-core/tests/unit/streaming/test_buffered_stream.py b/packages/google-cloud-core/tests/unit/streaming/test_buffered_stream.py deleted file mode 100644 index 797ceea2d280..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/test_buffered_stream.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_BufferedStream(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.buffered_stream import BufferedStream - - return BufferedStream - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_closed_stream(self): - class _Stream(object): - closed = True - - start = 0 - bufsize = 4 - bufstream = self._make_one(_Stream, start, bufsize) - self.assertIs(bufstream._stream, _Stream) - self.assertEqual(bufstream._start_pos, start) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, b'') - self.assertTrue(bufstream._stream_at_end) - self.assertEqual(bufstream._end_pos, 0) - - def test_ctor_start_zero_longer_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertIs(bufstream._stream, stream) - self.assertEqual(bufstream._start_pos, START) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, CONTENT[:BUFSIZE]) - self.assertEqual(len(bufstream), BUFSIZE) - self.assertFalse(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, BUFSIZE) - - def test_ctor_start_nonzero_shorter_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 8 - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertIs(bufstream._stream, stream) - self.assertEqual(bufstream._start_pos, START) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, CONTENT[START:]) - self.assertEqual(len(bufstream), len(CONTENT) - START) - self.assertTrue(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, len(CONTENT)) - - def test__bytes_remaining_start_zero_longer_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream._bytes_remaining, BUFSIZE) - - def test__bytes_remaining_start_zero_shorter_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 8 - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream._bytes_remaining, len(CONTENT) - START) - - def test_read_w_none(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - with self.assertRaises(ValueError): - bufstream.read(None) - - def test_read_w_negative_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - with self.assertRaises(ValueError): - bufstream.read(-2) - - def test_read_from_start(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream.read(4), CONTENT[:4]) - - def test_read_exhausted(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = len(CONTENT) - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - 
bufstream = self._make_one(stream, START, BUFSIZE) - self.assertTrue(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, len(CONTENT)) - self.assertEqual(bufstream._bytes_remaining, 0) - self.assertEqual(bufstream.read(10), b'') diff --git a/packages/google-cloud-core/tests/unit/streaming/test_exceptions.py b/packages/google-cloud-core/tests/unit/streaming/test_exceptions.py deleted file mode 100644 index b31c562c8e9d..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/test_exceptions.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test_HttpError(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.exceptions import HttpError - - return HttpError - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - exception = self._make_one(RESPONSE, CONTENT, URL) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - self.assertEqual(exception.status_code, 404) - self.assertEqual( - str(exception), - "HttpError accessing : " - "response: <{'status': '404'}>, content ") - - def test_from_response(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - - class _Response(object): - info = RESPONSE - content = CONTENT - request_url = URL - - klass = self._get_target_class() - exception = klass.from_response(_Response()) - self.assertIsInstance(exception, klass) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - - -class Test_RetryAfterError(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.exceptions import RetryAfterError - - return RetryAfterError - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - RETRY_AFTER = 60 - exception = self._make_one(RESPONSE, CONTENT, URL, RETRY_AFTER) - self.assertEqual(exception.response, RESPONSE) - self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - self.assertEqual(exception.retry_after, RETRY_AFTER) - self.assertEqual( - str(exception), - "HttpError accessing : " - "response: <{'status': '404'}>, content ") - - def test_from_response(self): - RESPONSE = {'status': '404'} - CONTENT = b'CONTENT' - URL = 'http://www.example.com' - RETRY_AFTER = 60 - - class _Response(object): - info = RESPONSE - content = CONTENT - request_url = URL - retry_after = RETRY_AFTER - - klass = self._get_target_class() - exception = klass.from_response(_Response()) - self.assertIsInstance(exception, klass) - self.assertEqual(exception.response, RESPONSE) - 
self.assertEqual(exception.content, CONTENT) - self.assertEqual(exception.url, URL) - self.assertEqual(exception.retry_after, RETRY_AFTER) diff --git a/packages/google-cloud-core/tests/unit/streaming/test_http_wrapper.py b/packages/google-cloud-core/tests/unit/streaming/test_http_wrapper.py deleted file mode 100644 index b0d3156ba42f..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/test_http_wrapper.py +++ /dev/null @@ -1,498 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test__httplib2_debug_level(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.http_wrapper import _httplib2_debug_level - - return _httplib2_debug_level - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_wo_loggable_body_wo_http(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - request = _Request() - LEVEL = 1 - _httplib2 = _Dummy(debuglevel=0) - with _Monkey(MUT, httplib2=_httplib2): - with self._make_one(request, LEVEL): - self.assertEqual(_httplib2.debuglevel, 0) - - def test_w_loggable_body_wo_http(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - request = _Request(loggable_body=object()) - LEVEL = 1 - _httplib2 = _Dummy(debuglevel=0) - with _Monkey(MUT, httplib2=_httplib2): - with self._make_one(request, LEVEL): - self.assertEqual(_httplib2.debuglevel, LEVEL) - self.assertEqual(_httplib2.debuglevel, 0) - - def test_w_loggable_body_w_http(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - class _Connection(object): - debuglevel = 0 - - def set_debuglevel(self, value): - self.debuglevel = value - - request = _Request(loggable_body=object()) - LEVEL = 1 - _httplib2 = _Dummy(debuglevel=0) - update_me = _Connection() - skip_me = _Connection() - connections = {'update:me': update_me, 'skip_me': skip_me} - _http = _Dummy(connections=connections) - with _Monkey(MUT, httplib2=_httplib2): - with self._make_one(request, LEVEL, _http): - self.assertEqual(_httplib2.debuglevel, LEVEL) - self.assertEqual(update_me.debuglevel, LEVEL) - self.assertEqual(skip_me.debuglevel, 0) - self.assertEqual(_httplib2.debuglevel, 0) - self.assertEqual(update_me.debuglevel, 0) - self.assertEqual(skip_me.debuglevel, 0) - - -class Test_Request(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.http_wrapper import Request - - return Request - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - request = self._make_one() - self.assertEqual(request.url, '') - self.assertEqual(request.http_method, 'GET') - self.assertEqual(request.headers, {'content-length': '0'}) - self.assertEqual(request.body, '') - self.assertIsNone(request.loggable_body) - - def 
test_loggable_body_setter_w_body_None(self): - from google.cloud.streaming.exceptions import RequestError - - request = self._make_one(body=None) - with self.assertRaises(RequestError): - request.loggable_body = 'abc' - - def test_body_setter_w_None(self): - request = self._make_one() - request.loggable_body = 'abc' - request.body = None - self.assertEqual(request.headers, {}) - self.assertIsNone(request.body) - self.assertEqual(request.loggable_body, 'abc') - - def test_body_setter_w_non_string(self): - request = self._make_one() - request.loggable_body = 'abc' - request.body = body = _Dummy(length=123) - self.assertEqual(request.headers, {'content-length': '123'}) - self.assertIs(request.body, body) - self.assertEqual(request.loggable_body, '') - - -class Test_Response(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.http_wrapper import Response - - return Response - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = {'status': '200'} - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), len(CONTENT)) - self.assertEqual(response.status_code, 200) - self.assertIsNone(response.retry_after) - self.assertFalse(response.is_redirect) - - def test_length_w_content_encoding_w_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - RANGE = 'bytes 0-122/5678' - info = { - 'status': '200', - 'content-length': len(CONTENT), - 'content-encoding': 'testing', - 'content-range': RANGE, - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), 123) - - def test_length_w_content_encoding_wo_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '200', - 'content-length': len(CONTENT), - 'content-encoding': 'testing', - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), len(CONTENT)) - - def test_length_w_content_length_w_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - RANGE = 'bytes 0-12/5678' - info = { - 'status': '200', - 'content-length': len(CONTENT) * 2, - 'content-range': RANGE, - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), len(CONTENT) * 2) - - def test_length_wo_content_length_w_content_range(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - RANGE = 'bytes 0-122/5678' - info = { - 'status': '200', - 'content-range': RANGE, - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(len(response), 123) - - def test_retry_after_w_header(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '200', - 'retry-after': '123', - } - response = self._make_one(info, CONTENT, URL) - self.assertEqual(response.retry_after, 123) - - def test_is_redirect_w_code_wo_location(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '301', - } - response = self._make_one(info, CONTENT, URL) - self.assertFalse(response.is_redirect) - - def test_is_redirect_w_code_w_location(self): - CONTENT = 'CONTENT' - URL = 'http://example.com/api' - info = { - 'status': '301', - 'location': 'http://example.com/other', - } - response = self._make_one(info, CONTENT, URL) - self.assertTrue(response.is_redirect) - - -class Test__check_response(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import _check_response - 
- return _check_response(*args, **kw) - - def test_w_none(self): - from google.cloud.streaming.exceptions import RequestError - - with self.assertRaises(RequestError): - self._call_fut(None) - - def test_w_TOO_MANY_REQUESTS(self): - from google.cloud.streaming.exceptions import BadStatusCodeError - from google.cloud.streaming.http_wrapper import TOO_MANY_REQUESTS - - with self.assertRaises(BadStatusCodeError): - self._call_fut(_Response(TOO_MANY_REQUESTS)) - - def test_w_50x(self): - from google.cloud.streaming.exceptions import BadStatusCodeError - - with self.assertRaises(BadStatusCodeError): - self._call_fut(_Response(500)) - - with self.assertRaises(BadStatusCodeError): - self._call_fut(_Response(503)) - - def test_w_retry_after(self): - from google.cloud.streaming.exceptions import RetryAfterError - - with self.assertRaises(RetryAfterError): - self._call_fut(_Response(200, 20)) - - def test_pass(self): - self._call_fut(_Response(200)) - - -class Test__reset_http_connections(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import _reset_http_connections - - return _reset_http_connections(*args, **kw) - - def test_wo_connections(self): - http = object() - self._call_fut(http) - - def test_w_connections(self): - connections = {'delete:me': object(), 'skip_me': object()} - http = _Dummy(connections=connections) - self._call_fut(http) - self.assertFalse('delete:me' in connections) - self.assertTrue('skip_me' in connections) - - -class Test___make_api_request_no_retry(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import ( - _make_api_request_no_retry) - return _make_api_request_no_retry(*args, **kw) - - def _verify_requested(self, http, request, - redirections=5, connection_type=None): - self.assertEqual(len(http._requested), 1) - url, kw = http._requested[0] - self.assertEqual(url, request.url) - self.assertEqual(kw['method'], request.http_method) - self.assertEqual(kw['body'], request.body) - self.assertEqual(kw['headers'], request.headers) - self.assertEqual(kw['redirections'], redirections) - self.assertEqual(kw['connection_type'], connection_type) - - def test_defaults_wo_connections(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - INFO = {'status': '200'} - CONTENT = 'CONTENT' - _http = _Http((INFO, CONTENT)) - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - _checked = [] - with _Monkey(MUT, httplib2=_httplib2, - _check_response=_checked.append): - response = self._call_fut(_http, _request) - - self.assertIsInstance(response, MUT.Response) - self.assertEqual(response.info, INFO) - self.assertEqual(response.content, CONTENT) - self.assertEqual(response.request_url, _request.url) - self.assertEqual(_checked, [response]) - self._verify_requested(_http, _request) - - def test_w_http_connections_miss(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - INFO = {'status': '200'} - CONTENT = 'CONTENT' - CONN_TYPE = object() - _http = _Http((INFO, CONTENT)) - _http.connections = {'https': CONN_TYPE} - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - _checked = [] - with _Monkey(MUT, httplib2=_httplib2, - _check_response=_checked.append): - response = self._call_fut(_http, _request) - - self.assertIsInstance(response, MUT.Response) - self.assertEqual(response.info, INFO) - self.assertEqual(response.content, CONTENT) - 
self.assertEqual(response.request_url, _request.url) - self.assertEqual(_checked, [response]) - self._verify_requested(_http, _request) - - def test_w_http_connections_hit(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - INFO = {'status': '200'} - CONTENT = 'CONTENT' - CONN_TYPE = object() - _http = _Http((INFO, CONTENT)) - _http.connections = {'http': CONN_TYPE} - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - _checked = [] - with _Monkey(MUT, httplib2=_httplib2, - _check_response=_checked.append): - response = self._call_fut(_http, _request) - - self.assertIsInstance(response, MUT.Response) - self.assertEqual(response.info, INFO) - self.assertEqual(response.content, CONTENT) - self.assertEqual(response.request_url, _request.url) - self.assertEqual(_checked, [response]) - self._verify_requested(_http, _request, connection_type=CONN_TYPE) - - def test_w_request_returning_None(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - from google.cloud.streaming.exceptions import RequestError - - INFO = None - CONTENT = None - CONN_TYPE = object() - _http = _Http((INFO, CONTENT)) - _http.connections = {'http': CONN_TYPE} - _httplib2 = _Dummy(debuglevel=1) - _request = _Request() - with _Monkey(MUT, httplib2=_httplib2): - with self.assertRaises(RequestError): - self._call_fut(_http, _request) - self._verify_requested(_http, _request, connection_type=CONN_TYPE) - - -class Test_make_api_request(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.http_wrapper import make_api_request - - return make_api_request(*args, **kw) - - def test_wo_exception(self): - from google.cloud.streaming import http_wrapper as MUT - from google.cloud._testing import _Monkey - - HTTP, REQUEST, RESPONSE = object(), object(), object() - _created, _checked = [], [] - - def _wo_exception(*args, **kw): - _created.append((args, kw)) - return RESPONSE - - with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, - _check_response=_checked.append): - response = self._call_fut(HTTP, REQUEST) - - self.assertIs(response, RESPONSE) - expected_kw = {'redirections': MUT._REDIRECTIONS} - self.assertEqual(_created, [((HTTP, REQUEST), expected_kw)]) - self.assertEqual(_checked, []) # not called by '_wo_exception' - - def test_w_exceptions_lt_max_retries(self): - from google.cloud.streaming.exceptions import RetryAfterError - from google.cloud.streaming import http_wrapper as MUT - from google.cloud._testing import _Monkey - - HTTP, RESPONSE = object(), object() - REQUEST = _Request() - _created, _checked = [], [] - _counter = [None] * 4 - - def _wo_exception(*args, **kw): - _created.append((args, kw)) - if _counter: - _counter.pop() - raise RetryAfterError(RESPONSE, '', REQUEST.url, 0.1) - return RESPONSE - - with _Monkey(MUT, _make_api_request_no_retry=_wo_exception, - _check_response=_checked.append): - response = self._call_fut(HTTP, REQUEST, retries=5) - - self.assertIs(response, RESPONSE) - self.assertEqual(len(_created), 5) - expected_kw = {'redirections': MUT._REDIRECTIONS} - for attempt in _created: - self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw)) - self.assertEqual(_checked, []) # not called by '_wo_exception' - - def test_w_exceptions_gt_max_retries(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import http_wrapper as MUT - - HTTP = object() - REQUEST = _Request() - _created, _checked = [], [] - - def 
_wo_exception(*args, **kw): - _created.append((args, kw)) - raise ValueError('Retryable') - - with _Monkey(MUT, calculate_wait_for_retry=lambda *ignored: 0.1, - _make_api_request_no_retry=_wo_exception, - _check_response=_checked.append): - with self.assertRaises(ValueError): - self._call_fut(HTTP, REQUEST, retries=3) - - self.assertEqual(len(_created), 3) - expected_kw = {'redirections': MUT._REDIRECTIONS} - for attempt in _created: - self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw)) - self.assertEqual(_checked, []) # not called by '_wo_exception' - - -class _Dummy(object): - def __init__(self, **kw): - self.__dict__.update(kw) - - -class _Request(object): - __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body',) - URL = 'http://example.com/api' - - def __init__(self, url=URL, http_method='GET', body='', - loggable_body=None): - self.url = url - self.http_method = http_method - self.body = body - self.headers = {} - self.loggable_body = loggable_body - - -class _Response(object): - content = '' - request_url = _Request.URL - - def __init__(self, status_code, retry_after=None): - self.info = {'status': status_code} - self.status_code = status_code - self.retry_after = retry_after - - -class _Http(object): - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - - def request(self, url, **kw): - self._requested.append((url, kw)) - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/packages/google-cloud-core/tests/unit/streaming/test_stream_slice.py b/packages/google-cloud-core/tests/unit/streaming/test_stream_slice.py deleted file mode 100644 index 47820078447d..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/test_stream_slice.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_StreamSlice(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.stream_slice import StreamSlice - - return StreamSlice - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertIs(stream_slice._stream, stream) - self.assertEqual(stream_slice._remaining_bytes, MAXSIZE) - self.assertEqual(stream_slice._max_bytes, MAXSIZE) - self.assertEqual(len(stream_slice), MAXSIZE) - self.assertEqual(stream_slice.length, MAXSIZE) - - def test___nonzero___empty(self): - from io import BytesIO - - CONTENT = b'' - MAXSIZE = 0 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertFalse(stream_slice) - - def test___nonzero___nonempty(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertTrue(stream_slice) - - def test_read_exhausted(self): - from io import BytesIO - from six.moves import http_client - - CONTENT = b'' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - with self.assertRaises(http_client.IncompleteRead): - stream_slice.read() - - def test_read_implicit_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertEqual(stream_slice.read(), CONTENT[:MAXSIZE]) - self.assertEqual(stream_slice._remaining_bytes, 0) - - def test_read_explicit_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - MAXSIZE = 4 - SIZE = 3 - stream = BytesIO(CONTENT) - stream_slice = self._make_one(stream, MAXSIZE) - self.assertEqual(stream_slice.read(SIZE), CONTENT[:SIZE]) - self.assertEqual(stream_slice._remaining_bytes, MAXSIZE - SIZE) diff --git a/packages/google-cloud-core/tests/unit/streaming/test_transfer.py b/packages/google-cloud-core/tests/unit/streaming/test_transfer.py deleted file mode 100644 index 8bafd4a1cc47..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/test_transfer.py +++ /dev/null @@ -1,2035 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test__Transfer(unittest.TestCase): - URL = 'http://example.com/api' - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.transfer import _Transfer - - return _Transfer - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE - - stream = _Stream() - xfer = self._make_one(stream) - self.assertIs(xfer.stream, stream) - self.assertFalse(xfer.close_stream) - self.assertEqual(xfer.chunksize, _DEFAULT_CHUNKSIZE) - self.assertTrue(xfer.auto_transfer) - self.assertIsNone(xfer.bytes_http) - self.assertIsNone(xfer.http) - self.assertEqual(xfer.num_retries, 5) - self.assertIsNone(xfer.url) - self.assertFalse(xfer.initialized) - - def test_ctor_explicit(self): - stream = _Stream() - HTTP = object() - CHUNK_SIZE = 1 << 18 - NUM_RETRIES = 8 - xfer = self._make_one(stream, - close_stream=True, - chunksize=CHUNK_SIZE, - auto_transfer=False, - http=HTTP, - num_retries=NUM_RETRIES) - self.assertIs(xfer.stream, stream) - self.assertTrue(xfer.close_stream) - self.assertEqual(xfer.chunksize, CHUNK_SIZE) - self.assertFalse(xfer.auto_transfer) - self.assertIs(xfer.bytes_http, HTTP) - self.assertIs(xfer.http, HTTP) - self.assertEqual(xfer.num_retries, NUM_RETRIES) - - def test_bytes_http_fallback_to_http(self): - stream = _Stream() - HTTP = object() - xfer = self._make_one(stream, http=HTTP) - self.assertIs(xfer.bytes_http, HTTP) - - def test_bytes_http_setter(self): - stream = _Stream() - HTTP = object() - BYTES_HTTP = object() - xfer = self._make_one(stream, http=HTTP) - xfer.bytes_http = BYTES_HTTP - self.assertIs(xfer.bytes_http, BYTES_HTTP) - - def test_num_retries_setter_invalid(self): - stream = _Stream() - xfer = self._make_one(stream) - with self.assertRaises(ValueError): - xfer.num_retries = object() - - def test_num_retries_setter_negative(self): - stream = _Stream() - xfer = self._make_one(stream) - with self.assertRaises(ValueError): - xfer.num_retries = -1 - - def test__initialize_not_already_initialized_w_http(self): - HTTP = object() - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(HTTP, self.URL) - self.assertTrue(xfer.initialized) - self.assertIs(xfer.http, HTTP) - self.assertIs(xfer.url, self.URL) - - def test__initialize_not_already_initialized_wo_http(self): - from httplib2 import Http - - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(None, self.URL) - self.assertTrue(xfer.initialized) - self.assertIsInstance(xfer.http, Http) - self.assertIs(xfer.url, self.URL) - - def test__initialize_w_existing_http(self): - HTTP_1, HTTP_2 = object(), object() - stream = _Stream() - xfer = self._make_one(stream, http=HTTP_1) - xfer._initialize(HTTP_2, self.URL) - self.assertTrue(xfer.initialized) - self.assertIs(xfer.http, HTTP_1) - self.assertIs(xfer.url, self.URL) - - def test__initialize_already_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - URL_2 = 'http://example.com/other' - HTTP_1, HTTP_2 = object(), object() - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(HTTP_1, self.URL) - with self.assertRaises(TransferInvalidError): - xfer._initialize(HTTP_2, URL_2) - - def test__ensure_initialized_hit(self): - HTTP = object() - stream = _Stream() - xfer = self._make_one(stream) - xfer._initialize(HTTP, self.URL) - xfer._ensure_initialized() # no raise - - def test__ensure_initialized_miss(self): - from 
google.cloud.streaming.exceptions import TransferInvalidError - - stream = _Stream() - xfer = self._make_one(stream) - with self.assertRaises(TransferInvalidError): - xfer._ensure_initialized() - - def test__ensure_uninitialized_hit(self): - stream = _Stream() - xfer = self._make_one(stream) - xfer._ensure_uninitialized() # no raise - - def test__ensure_uninitialized_miss(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - stream = _Stream() - HTTP = object() - xfer = self._make_one(stream) - xfer._initialize(HTTP, self.URL) - with self.assertRaises(TransferInvalidError): - xfer._ensure_uninitialized() - - def test___del___closes_stream(self): - - stream = _Stream() - xfer = self._make_one(stream, close_stream=True) - - self.assertFalse(stream._closed) - del xfer - self.assertTrue(stream._closed) - - -class Test_Download(unittest.TestCase): - URL = "http://example.com/api" - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.transfer import Download - - return Download - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - stream = _Stream() - download = self._make_one(stream) - self.assertIs(download.stream, stream) - self.assertIsNone(download._initial_response) - self.assertEqual(download.progress, 0) - self.assertIsNone(download.total_size) - self.assertIsNone(download.encoding) - - def test_ctor_w_kwds(self): - stream = _Stream() - CHUNK_SIZE = 123 - download = self._make_one(stream, chunksize=CHUNK_SIZE) - self.assertIs(download.stream, stream) - self.assertEqual(download.chunksize, CHUNK_SIZE) - - def test_ctor_w_total_size(self): - stream = _Stream() - SIZE = 123 - download = self._make_one(stream, total_size=SIZE) - self.assertIs(download.stream, stream) - self.assertEqual(download.total_size, SIZE) - - def test_from_file_w_existing_file_no_override(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.out') - with open(filename, 'w') as fileobj: - fileobj.write('EXISTING FILE') - with self.assertRaises(ValueError): - klass.from_file(filename) - - def test_from_file_w_existing_file_w_override_wo_auto_transfer(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.out') - with open(filename, 'w') as fileobj: - fileobj.write('EXISTING FILE') - download = klass.from_file(filename, overwrite=True, - auto_transfer=False) - self.assertFalse(download.auto_transfer) - del download # closes stream - with open(filename, 'rb') as fileobj: - self.assertEqual(fileobj.read(), b'') - - def test_from_stream_defaults(self): - stream = _Stream() - klass = self._get_target_class() - download = klass.from_stream(stream) - self.assertIs(download.stream, stream) - self.assertTrue(download.auto_transfer) - self.assertIsNone(download.total_size) - - def test_from_stream_explicit(self): - CHUNK_SIZE = 1 << 18 - SIZE = 123 - stream = _Stream() - klass = self._get_target_class() - download = klass.from_stream(stream, auto_transfer=False, - total_size=SIZE, chunksize=CHUNK_SIZE) - self.assertIs(download.stream, stream) - self.assertFalse(download.auto_transfer) - self.assertEqual(download.total_size, SIZE) - self.assertEqual(download.chunksize, CHUNK_SIZE) - - def test_configure_request(self): - CHUNK_SIZE = 100 - download = self._make_one(_Stream(), 
chunksize=CHUNK_SIZE) - request = _Dummy(headers={}) - url_builder = _Dummy(query_params={}) - download.configure_request(request, url_builder) - self.assertEqual(request.headers, {'Range': 'bytes=0-99'}) - self.assertEqual(url_builder.query_params, {'alt': 'media'}) - - def test__set_total_wo_content_range_wo_existing_total(self): - info = {} - download = self._make_one(_Stream()) - download._set_total(info) - self.assertEqual(download.total_size, 0) - - def test__set_total_wo_content_range_w_existing_total(self): - SIZE = 123 - info = {} - download = self._make_one(_Stream(), total_size=SIZE) - download._set_total(info) - self.assertEqual(download.total_size, SIZE) - - def test__set_total_w_content_range_w_existing_total(self): - SIZE = 123 - info = {'content-range': 'bytes 123-234/4567'} - download = self._make_one(_Stream(), total_size=SIZE) - download._set_total(info) - self.assertEqual(download.total_size, 4567) - - def test__set_total_w_content_range_w_asterisk_total(self): - info = {'content-range': 'bytes 123-234/*'} - download = self._make_one(_Stream()) - download._set_total(info) - self.assertEqual(download.total_size, 0) - - def test_initialize_download_already_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - request = _Request() - download = self._make_one(_Stream()) - download._initialize(None, self.URL) - with self.assertRaises(TransferInvalidError): - download.initialize_download(request, http=object()) - - def test_initialize_download_wo_autotransfer(self): - request = _Request() - http = object() - download = self._make_one(_Stream(), auto_transfer=False) - download.initialize_download(request, http) - self.assertIs(download.http, http) - self.assertEqual(download.url, request.url) - - def test_initialize_download_w_autotransfer_failing(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - - request = _Request() - http = object() - download = self._make_one(_Stream(), auto_transfer=True) - - response = _makeResponse(http_client.BAD_REQUEST) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - with self.assertRaises(HttpError): - download.initialize_download(request, http) - - self.assertTrue(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test_initialize_download_w_autotransfer_w_content_location(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - REDIRECT_URL = 'http://example.com/other' - request = _Request() - http = object() - info = {'content-location': REDIRECT_URL} - download = self._make_one(_Stream(), auto_transfer=True) - - response = _makeResponse(http_client.NO_CONTENT, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - download.initialize_download(request, http) - - self.assertIsNone(download._initial_response) - self.assertEqual(download.total_size, 0) - self.assertIs(download.http, http) - self.assertEqual(download.url, REDIRECT_URL) - self.assertTrue(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test__normalize_start_end_w_end_w_start_lt_0(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - - with self.assertRaises(TransferInvalidError): - 
download._normalize_start_end(-1, 0) - - def test__normalize_start_end_w_end_w_start_gt_total(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - - with self.assertRaises(TransferInvalidError): - download._normalize_start_end(3, 0) - - def test__normalize_start_end_w_end_lt_start(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - - with self.assertRaises(TransferInvalidError): - download._normalize_start_end(1, 0) - - def test__normalize_start_end_w_end_gt_start(self): - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - self.assertEqual(download._normalize_start_end(1, 2), (1, 1)) - - def test__normalize_start_end_wo_end_w_start_lt_0(self): - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/2'}) - self.assertEqual(download._normalize_start_end(-2), (0, 1)) - self.assertEqual(download._normalize_start_end(-1), (1, 1)) - - def test__normalize_start_end_wo_end_w_start_ge_0(self): - download = self._make_one(_Stream()) - download._set_total({'content-range': 'bytes 0-1/100'}) - self.assertEqual(download._normalize_start_end(0), (0, 99)) - self.assertEqual(download._normalize_start_end(1), (1, 99)) - - def test__set_range_header_w_start_lt_0(self): - request = _Request() - download = self._make_one(_Stream()) - download._set_range_header(request, -1) - self.assertEqual(request.headers['range'], 'bytes=-1') - - def test__set_range_header_w_start_ge_0_wo_end(self): - request = _Request() - download = self._make_one(_Stream()) - download._set_range_header(request, 0) - self.assertEqual(request.headers['range'], 'bytes=0-') - - def test__set_range_header_w_start_ge_0_w_end(self): - request = _Request() - download = self._make_one(_Stream()) - download._set_range_header(request, 0, 1) - self.assertEqual(request.headers['range'], 'bytes=0-1') - - def test__compute_end_byte_w_start_lt_0_w_end(self): - download = self._make_one(_Stream()) - self.assertEqual(download._compute_end_byte(-1, 1), 1) - - def test__compute_end_byte_w_start_ge_0_wo_end_w_use_chunks(self): - CHUNK_SIZE = 5 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - self.assertEqual(download._compute_end_byte(0, use_chunks=True), 4) - - def test__compute_end_byte_w_start_ge_0_w_end_w_use_chunks(self): - CHUNK_SIZE = 5 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - self.assertEqual(download._compute_end_byte(0, 3, use_chunks=True), 3) - self.assertEqual(download._compute_end_byte(0, 5, use_chunks=True), 4) - - def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self): - CHUNK_SIZE = 50 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - download._set_total({'content-range': 'bytes 0-1/10'}) - self.assertEqual(download._compute_end_byte(0, 100, use_chunks=False), - 9) - self.assertEqual(download._compute_end_byte(0, 8, use_chunks=False), 8) - - def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self): - CHUNK_SIZE = 50 - download = self._make_one(_Stream(), chunksize=CHUNK_SIZE) - download._set_total({'content-range': 'bytes 0-1/10'}) - self.assertEqual(download._compute_end_byte(0, use_chunks=False), 9) - - def test__get_chunk_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - 
- with self.assertRaises(TransferInvalidError): - download._get_chunk(0, 10) - - def test__get_chunk(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - http = object() - download = self._make_one(_Stream()) - download._initialize(http, self.URL) - response = _makeResponse(http_client.OK) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - found = download._get_chunk(0, 10) - - self.assertIs(found, response) - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers['range'], 'bytes=0-10') - - def test__process_response_w_FORBIDDEN(self): - from google.cloud.streaming.exceptions import HttpError - from six.moves import http_client - - download = self._make_one(_Stream()) - response = _makeResponse(http_client.FORBIDDEN) - with self.assertRaises(HttpError): - download._process_response(response) - - def test__process_response_w_NOT_FOUND(self): - from google.cloud.streaming.exceptions import HttpError - from six.moves import http_client - - download = self._make_one(_Stream()) - response = _makeResponse(http_client.NOT_FOUND) - with self.assertRaises(HttpError): - download._process_response(response) - - def test__process_response_w_other_error(self): - from google.cloud.streaming.exceptions import TransferRetryError - from six.moves import http_client - - download = self._make_one(_Stream()) - response = _makeResponse(http_client.BAD_REQUEST) - with self.assertRaises(TransferRetryError): - download._process_response(response) - - def test__process_response_w_OK_wo_encoding(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - response = _makeResponse(http_client.OK, content='OK') - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, ['OK']) - self.assertEqual(download.progress, 2) - self.assertIsNone(download.encoding) - - def test__process_response_w_PARTIAL_CONTENT_w_encoding(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - info = {'content-encoding': 'blah'} - response = _makeResponse(http_client.OK, info, 'PARTIAL') - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, ['PARTIAL']) - self.assertEqual(download.progress, 7) - self.assertEqual(download.encoding, 'blah') - - def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - response = _makeResponse( - http_client.REQUESTED_RANGE_NOT_SATISFIABLE) - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, []) - self.assertEqual(download.progress, 0) - self.assertIsNone(download.encoding) - - def test__process_response_w_NO_CONTENT(self): - from six.moves import http_client - - stream = _Stream() - download = self._make_one(stream) - response = _makeResponse(status_code=http_client.NO_CONTENT) - found = download._process_response(response) - self.assertIs(found, response) - self.assertEqual(stream._written, ['']) - self.assertEqual(download.progress, 0) - self.assertIsNone(download.encoding) - - def test_get_range_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = 
self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - download.get_range(0, 10) - - def test_get_range_wo_total_size_complete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - REQ_RANGE = 'bytes=0-%d' % (LEN,) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0, LEN) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_wo_total_size_wo_end(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - START = 5 - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,) - RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT[START:]) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(START) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT[START:]]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_total_size_partial(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - PARTIAL_LEN = 5 - REQ_RANGE = 'bytes=0-%d' % (PARTIAL_LEN,) - RESP_RANGE = 'bytes 0-%d/%d' % (PARTIAL_LEN, LEN,) - http = object() - stream = _Stream() - download = self._make_one(stream, total_size=LEN) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT[:PARTIAL_LEN]) - response.length = LEN - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0, PARTIAL_LEN) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT[:PARTIAL_LEN]]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_empty_chunk(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import TransferRetryError - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - START = 5 - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,) - RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream, 
chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(TransferRetryError): - download.get_range(START) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, ['']) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_total_size_wo_use_chunks(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - CHUNK_SIZE = 3 - REQ_RANGE = 'bytes=0-%d' % (LEN - 1,) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - download = self._make_one(stream, total_size=LEN, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0, use_chunks=False) - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - def test_get_range_w_multiple_chunks(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDE' - LEN = len(CONTENT) - CHUNK_SIZE = 3 - REQ_RANGE_1 = 'bytes=0-%d' % (CHUNK_SIZE - 1,) - RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN) - REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1) - RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN) - http = object() - stream = _Stream() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - download._initialize(http, self.URL) - info_1 = {'content-range': RESP_RANGE_1} - response_1 = _makeResponse(http_client.PARTIAL_CONTENT, info_1, - CONTENT[:CHUNK_SIZE]) - info_2 = {'content-range': RESP_RANGE_2} - response_2 = _makeResponse(http_client.OK, info_2, - CONTENT[CHUNK_SIZE:]) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.get_range(0) - - self.assertTrue(len(requester._requested), 2) - request_1 = requester._requested[0][0] - self.assertEqual(request_1.headers, {'range': REQ_RANGE_1}) - request_2 = requester._requested[1][0] - self.assertEqual(request_2.headers, {'range': REQ_RANGE_2}) - self.assertEqual(stream._written, [b'ABC', b'DE']) - self.assertEqual(download.total_size, LEN) - - def test_stream_file_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - download = self._make_one(_Stream()) - - with self.assertRaises(TransferInvalidError): - download.stream_file() - - def test_stream_file_w_initial_response_complete(self): - from six.moves import http_client - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - stream = _Stream() - download = self._make_one(stream, total_size=LEN) - info = {'content-range': RESP_RANGE} - download._initial_response = _makeResponse( - http_client.OK, info, CONTENT) - http = object() - 
download._initialize(http, _Request.URL) - - download.stream_file() - - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - def test_stream_file_w_initial_response_incomplete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CHUNK_SIZE = 3 - CONTENT = b'ABCDEF' - LEN = len(CONTENT) - RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN,) - REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1) - RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN,) - stream = _Stream() - http = object() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - info_1 = {'content-range': RESP_RANGE_1} - download._initial_response = _makeResponse( - http_client.PARTIAL_CONTENT, info_1, CONTENT[:CHUNK_SIZE]) - info_2 = {'content-range': RESP_RANGE_2} - response_2 = _makeResponse( - http_client.OK, info_2, CONTENT[CHUNK_SIZE:]) - requester = _MakeRequest(response_2) - - download._initialize(http, _Request.URL) - - request = _Request() - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.stream_file() - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE_2}) - self.assertEqual(stream._written, - [CONTENT[:CHUNK_SIZE], CONTENT[CHUNK_SIZE:]]) - self.assertEqual(download.total_size, LEN) - - def test_stream_file_wo_initial_response_wo_total_size(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - CHUNK_SIZE = 123 - REQ_RANGE = 'bytes=0-%d' % (CHUNK_SIZE - 1) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - stream = _Stream() - http = object() - download = self._make_one(stream, chunksize=CHUNK_SIZE) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - download._initialize(http, _Request.URL) - - request = _Request() - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - download.stream_file() - - self.assertTrue(len(requester._requested), 1) - request = requester._requested[0][0] - self.assertEqual(request.headers, {'range': REQ_RANGE}) - self.assertEqual(stream._written, [CONTENT]) - self.assertEqual(download.total_size, LEN) - - -class Test_Upload(unittest.TestCase): - URL = "http://example.com/api" - MIME_TYPE = 'application/octet-stream' - UPLOAD_URL = 'http://example.com/upload/id=foobar' - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.transfer import Upload - - return Upload - - def _make_one(self, stream, mime_type=MIME_TYPE, *args, **kw): - return self._get_target_class()(stream, mime_type, *args, **kw) - - def test_ctor_defaults(self): - from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE - - stream = _Stream() - upload = self._make_one(stream) - self.assertIs(upload.stream, stream) - self.assertIsNone(upload._final_response) - self.assertIsNone(upload._server_chunk_granularity) - self.assertFalse(upload.complete) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertEqual(upload.progress, 0) - self.assertIsNone(upload.strategy) - self.assertIsNone(upload.total_size) - self.assertEqual(upload.chunksize, _DEFAULT_CHUNKSIZE) - - def test_ctor_w_kwds(self): - stream = _Stream() - CHUNK_SIZE = 123 - upload = self._make_one(stream, chunksize=CHUNK_SIZE) - 
self.assertIs(upload.stream, stream) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertEqual(upload.chunksize, CHUNK_SIZE) - - def test_from_file_w_nonesuch_file(self): - klass = self._get_target_class() - filename = '~nosuchuser/file.txt' - with self.assertRaises(OSError): - klass.from_file(filename) - - def test_from_file_wo_mimetype_w_unguessable_filename(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE' - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.unguessable') - with open(filename, 'wb') as fileobj: - fileobj.write(CONTENT) - with self.assertRaises(ValueError): - klass.from_file(filename) - - def test_from_file_wo_mimetype_w_guessable_filename(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.txt') - with open(filename, 'wb') as fileobj: - fileobj.write(CONTENT) - upload = klass.from_file(filename) - self.assertEqual(upload.mime_type, 'text/plain') - self.assertTrue(upload.auto_transfer) - self.assertEqual(upload.total_size, len(CONTENT)) - upload._stream.close() - - def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self): - import os - from google.cloud._testing import _tempdir - - klass = self._get_target_class() - CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE' - CHUNK_SIZE = 3 - with _tempdir() as tempdir: - filename = os.path.join(tempdir, 'file.unguessable') - with open(filename, 'wb') as fileobj: - fileobj.write(CONTENT) - upload = klass.from_file( - filename, - mime_type=self.MIME_TYPE, - auto_transfer=False, - chunksize=CHUNK_SIZE) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertFalse(upload.auto_transfer) - self.assertEqual(upload.total_size, len(CONTENT)) - self.assertEqual(upload.chunksize, CHUNK_SIZE) - upload._stream.close() - - def test_from_stream_wo_mimetype(self): - klass = self._get_target_class() - stream = _Stream() - with self.assertRaises(ValueError): - klass.from_stream(stream, mime_type=None) - - def test_from_stream_defaults(self): - klass = self._get_target_class() - stream = _Stream() - upload = klass.from_stream(stream, mime_type=self.MIME_TYPE) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertTrue(upload.auto_transfer) - self.assertIsNone(upload.total_size) - - def test_from_stream_explicit(self): - klass = self._get_target_class() - stream = _Stream() - SIZE = 10 - CHUNK_SIZE = 3 - upload = klass.from_stream( - stream, - mime_type=self.MIME_TYPE, - auto_transfer=False, - total_size=SIZE, - chunksize=CHUNK_SIZE) - self.assertEqual(upload.mime_type, self.MIME_TYPE) - self.assertFalse(upload.auto_transfer) - self.assertEqual(upload.total_size, SIZE) - self.assertEqual(upload.chunksize, CHUNK_SIZE) - - def test_strategy_setter_invalid(self): - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.strategy = object() - with self.assertRaises(ValueError): - upload.strategy = 'unknown' - - def test_strategy_setter_SIMPLE_UPLOAD(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - - def test_strategy_setter_RESUMABLE_UPLOAD(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream()) - 
upload.strategy = RESUMABLE_UPLOAD - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test_total_size_setter_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - SIZE = 123 - upload = self._make_one(_Stream) - http = object() - upload._initialize(http, _Request.URL) - with self.assertRaises(TransferInvalidError): - upload.total_size = SIZE - - def test_total_size_setter_not_initialized(self): - SIZE = 123 - upload = self._make_one(_Stream) - upload.total_size = SIZE - self.assertEqual(upload.total_size, SIZE) - - def test__set_default_strategy_w_existing_strategy(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - config = _Dummy( - resumable_path='/resumable/endpoint', - simple_multipart=True, - simple_path='/upload/endpoint', - ) - request = _Request() - upload = self._make_one(_Stream) - upload.strategy = RESUMABLE_UPLOAD - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_wo_resumable_path(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - config = _Dummy( - resumable_path=None, - simple_multipart=True, - simple_path='/upload/endpoint', - ) - request = _Request() - upload = self._make_one(_Stream()) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - - def test__set_default_strategy_w_total_size_gt_threshhold(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD_THRESHOLD - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - config = _UploadConfig() - request = _Request() - upload = self._make_one( - _Stream(), total_size=RESUMABLE_UPLOAD_THRESHOLD + 1) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_w_body_wo_multipart(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - config = _UploadConfig() - config.simple_multipart = False - request = _Request(body=CONTENT) - upload = self._make_one(_Stream(), total_size=len(CONTENT)) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - config = _UploadConfig() - config.simple_path = None - request = _Request(body=CONTENT) - upload = self._make_one(_Stream(), total_size=len(CONTENT)) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, RESUMABLE_UPLOAD) - - def test__set_default_strategy_w_body_w_multipart_w_simple_path(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - config = _UploadConfig() - request = _Request(body=CONTENT) - upload = self._make_one(_Stream(), total_size=len(CONTENT)) - upload._set_default_strategy(config, request) - self.assertEqual(upload.strategy, SIMPLE_UPLOAD) - - def test_configure_request_w_total_size_gt_max_size(self): - MAX_SIZE = 1000 - config = _UploadConfig() - config.max_size = MAX_SIZE - request = _Request() - url_builder = _Dummy() - upload = self._make_one(_Stream(), total_size=MAX_SIZE + 1) - with self.assertRaises(ValueError): - upload.configure_request(config, request, url_builder) - - def test_configure_request_w_invalid_mimetype(self): - config = _UploadConfig() - config.accept = ('text/*',) - request = _Request() - url_builder = 
_Dummy() - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.configure_request(config, request, url_builder) - - def test_configure_request_w_simple_wo_body(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'CONTENT' - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = SIMPLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'media'}) - self.assertEqual(url_builder.relative_path, config.simple_path) - - self.assertEqual(request.headers, {'content-type': self.MIME_TYPE}) - self.assertEqual(request.body, CONTENT) - self.assertEqual(request.loggable_body, '') - - def test_configure_request_w_simple_w_body(self): - from google.cloud._helpers import _to_bytes - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'CONTENT' - BODY = b'BODY' - config = _UploadConfig() - request = _Request(body=BODY) - request.headers['content-type'] = 'text/plain' - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = SIMPLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'multipart'}) - self.assertEqual(url_builder.relative_path, config.simple_path) - - self.assertEqual(list(request.headers), ['content-type']) - ctype, boundary = [x.strip() - for x in request.headers['content-type'].split(';')] - self.assertEqual(ctype, 'multipart/related') - self.assertTrue(boundary.startswith('boundary="==')) - self.assertTrue(boundary.endswith('=="')) - - divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) - chunks = request.body.split(divider)[1:-1] # discard prolog / epilog - self.assertEqual(len(chunks), 2) - - parse_chunk = _email_chunk_parser() - text_msg = parse_chunk(chunks[0].strip()) - self.assertEqual(dict(text_msg._headers), - {'Content-Type': 'text/plain', - 'MIME-Version': '1.0'}) - self.assertEqual(text_msg._payload, BODY.decode('ascii')) - - app_msg = parse_chunk(chunks[1].strip()) - self.assertEqual(dict(app_msg._headers), - {'Content-Type': self.MIME_TYPE, - 'Content-Transfer-Encoding': 'binary', - 'MIME-Version': '1.0'}) - self.assertEqual(app_msg._payload, CONTENT.decode('ascii')) - self.assertTrue(b'' in request.loggable_body) - - def test_configure_request_w_resumable_wo_total_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'CONTENT' - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = RESUMABLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) - self.assertEqual(url_builder.relative_path, config.resumable_path) - - self.assertEqual(request.headers, - {'X-Upload-Content-Type': self.MIME_TYPE}) - - def test_configure_request_w_resumable_w_total_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'CONTENT' - LEN = len(CONTENT) - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.total_size = LEN - upload.strategy = RESUMABLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 
'resumable'}) - self.assertEqual(url_builder.relative_path, config.resumable_path) - - self.assertEqual(request.headers, - {'X-Upload-Content-Type': self.MIME_TYPE, - 'X-Upload-Content-Length': '%d' % (LEN,)}) - - def test_refresh_upload_state_w_simple_strategy(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - upload.refresh_upload_state() # no-op - - def test_refresh_upload_state_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - with self.assertRaises(TransferInvalidError): - upload.refresh_upload_state() - - def test_refresh_upload_state_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertTrue(upload.complete) - self.assertEqual(upload.progress, LEN) - self.assertEqual(stream.tell(), LEN) - self.assertIs(upload._final_response, response) - - def test_refresh_upload_state_w_CREATED(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.CREATED, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertTrue(upload.complete) - self.assertEqual(upload.progress, LEN) - self.assertEqual(stream.tell(), LEN) - self.assertIs(upload._final_response, response) - - def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud._testing import _Monkey - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - LAST = 5 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'range': '0-%d' % (LAST - 1,)} - response = _makeResponse(RESUME_INCOMPLETE, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertFalse(upload.complete) - self.assertEqual(upload.progress, LAST) - self.assertEqual(stream.tell(), LAST) - self.assertIsNot(upload._final_response, 
response) - - def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud._testing import _Monkey - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - response = _makeResponse(RESUME_INCOMPLETE, content=CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertFalse(upload.complete) - self.assertEqual(upload.progress, 0) - self.assertEqual(stream.tell(), 0) - self.assertIsNot(upload._final_response, response) - - def test_refresh_upload_state_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - response = _makeResponse(http_client.FORBIDDEN) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(HttpError): - upload.refresh_upload_state() - - def test__get_range_header_miss(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None) - self.assertIsNone(upload._get_range_header(response)) - - def test__get_range_header_w_Range(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None, {'Range': '123'}) - self.assertEqual(upload._get_range_header(response), '123') - - def test__get_range_header_w_range(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None, {'range': '123'}) - self.assertEqual(upload._get_range_header(response), '123') - - def test_initialize_upload_no_strategy(self): - request = _Request() - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_simple_w_http(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - upload.initialize_upload(request, http=object()) # no-op - - def test_initialize_upload_resumable_already_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(None, self.URL) - with self.assertRaises(TransferInvalidError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - response = 
_makeResponse(http_client.FORBIDDEN) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - with self.assertRaises(HttpError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream(), auto_transfer=False) - upload.strategy = RESUMABLE_UPLOAD - info = {'location': self.UPLOAD_URL} - response = _makeResponse(http_client.OK, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - upload.initialize_upload(request, http=object()) - - self.assertIsNone(upload._server_chunk_granularity) - self.assertEqual(upload.url, self.UPLOAD_URL) - self.assertEqual(requester._responses, []) - self.assertEqual(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - request = _Request() - upload = self._make_one(_Stream(CONTENT), chunksize=1000) - upload.strategy = RESUMABLE_UPLOAD - info = {'X-Goog-Upload-Chunk-Granularity': '100', - 'location': self.UPLOAD_URL} - response = _makeResponse(http_client.OK, info) - chunk_response = _makeResponse(http_client.OK) - requester = _MakeRequest(response, chunk_response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.initialize_upload(request, http) - - self.assertEqual(upload._server_chunk_granularity, 100) - self.assertEqual(upload.url, self.UPLOAD_URL) - self.assertEqual(requester._responses, []) - self.assertEqual(len(requester._requested), 2) - self.assertIs(requester._requested[0][0], request) - chunk_request = requester._requested[1][0] - self.assertIsInstance(chunk_request, _Request) - self.assertEqual(chunk_request.url, self.UPLOAD_URL) - self.assertEqual(chunk_request.http_method, 'PUT') - self.assertEqual(chunk_request.body, CONTENT) - - def test__last_byte(self): - upload = self._make_one(_Stream()) - self.assertEqual(upload._last_byte('123-456'), 456) - - def test__validate_chunksize_wo__server_chunk_granularity(self): - upload = self._make_one(_Stream()) - upload._validate_chunksize(123) # no-op - - def test__validate_chunksize_w__server_chunk_granularity_miss(self): - upload = self._make_one(_Stream()) - upload._server_chunk_granularity = 100 - with self.assertRaises(ValueError): - upload._validate_chunksize(123) - - def test__validate_chunksize_w__server_chunk_granularity_hit(self): - upload = self._make_one(_Stream()) - upload._server_chunk_granularity = 100 - upload._validate_chunksize(400) - - def test_stream_file_w_simple_strategy(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - with self.assertRaises(ValueError): - upload.stream_file() - - def test_stream_file_w_use_chunks_invalid_chunk_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream(), chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 100 - with 
self.assertRaises(ValueError): - upload.stream_file(use_chunks=True) - - def test_stream_file_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream(), chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - with self.assertRaises(TransferInvalidError): - upload.stream_file() - - def test_stream_file_already_complete_w_unseekable_stream(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - http = object() - stream = object() - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(), response) - - def test_stream_file_already_complete_w_seekable_stream_unsynced(self): - from google.cloud.streaming.exceptions import CommunicationError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - with self.assertRaises(CommunicationError): - upload.stream_file() - - def test_stream_file_already_complete_wo_seekable_method_synced(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_already_complete_w_seekable_method_true_synced(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _StreamWithSeekableMethod(CONTENT, True) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_already_complete_w_seekable_method_false(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _StreamWithSeekableMethod(CONTENT, False) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_incomplete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from 
google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, chunksize=6) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 6 - upload._initialize(http, self.UPLOAD_URL) - - info_1 = {'content-length': '0', 'range': 'bytes=0-5'} - response_1 = _makeResponse(RESUME_INCOMPLETE, info_1) - info_2 = {'content-length': '0', 'range': 'bytes=6-9'} - response_2 = _makeResponse(http_client.OK, info_2) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - response = upload.stream_file() - - self.assertIs(response, response_2) - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 2) - - request_1 = requester._requested[0][0] - self.assertEqual(request_1.url, self.UPLOAD_URL) - self.assertEqual(request_1.http_method, 'PUT') - self.assertEqual(request_1.headers, - {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request_1.body, CONTENT[:6]) - - request_2 = requester._requested[1][0] - self.assertEqual(request_2.url, self.UPLOAD_URL) - self.assertEqual(request_2.http_method, 'PUT') - self.assertEqual(request_2.headers, - {'Content-Range': 'bytes 6-9/10', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request_2.body, CONTENT[6:]) - - def test_stream_file_incomplete_w_transfer_error(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import CommunicationError - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, chunksize=6) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 6 - upload._initialize(http, self.UPLOAD_URL) - - info = { - 'content-length': '0', - 'range': 'bytes=0-4', # simulate error, s.b. 
'0-5' - } - response = _makeResponse(RESUME_INCOMPLETE, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(CommunicationError): - upload.stream_file() - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 1) - - request = requester._requested[0][0] - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.headers, - {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request.body, CONTENT[:6]) - - def test__send_media_request_wo_error(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - - CONTENT = b'ABCDEFGHIJ' - bytes_http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream) - upload.bytes_http = bytes_http - - headers = {'Content-Range': 'bytes 0-9/10', - 'Content-Type': self.MIME_TYPE} - request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) - info = {'content-length': '0', 'range': 'bytes=0-4'} - response = _makeResponse(RESUME_INCOMPLETE, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - upload._send_media_request(request, 9) - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 1) - used_request, used_http, _ = requester._requested[0] - self.assertIs(used_request, request) - self.assertIs(used_http, bytes_http) - self.assertEqual(stream.tell(), 4) - - def test__send_media_request_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - bytes_http = object() - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, self.UPLOAD_URL) - upload.bytes_http = bytes_http - - headers = {'Content-Range': 'bytes 0-9/10', - 'Content-Type': self.MIME_TYPE} - request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) - info_1 = {'content-length': '0', 'range': 'bytes=0-4'} - response_1 = _makeResponse(http_client.FORBIDDEN, info_1) - info_2 = {'Content-Length': '0', 'Range': 'bytes=0-4'} - response_2 = _makeResponse(RESUME_INCOMPLETE, info_2) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, Request=_Request, make_api_request=requester): - with self.assertRaises(HttpError): - upload._send_media_request(request, 9) - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 2) - first_request, first_http, _ = requester._requested[0] - self.assertIs(first_request, request) - self.assertIs(first_http, bytes_http) - second_request, second_http, _ = requester._requested[1] - self.assertEqual(second_request.url, self.UPLOAD_URL) - self.assertEqual(second_request.http_method, 'PUT') # ACK! 
- self.assertEqual(second_request.headers, - {'Content-Range': 'bytes */*'}) - self.assertIs(second_http, http) - - def test__send_media_body_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - upload = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - upload._send_media_body(0) - - def test__send_media_body_wo_total_size(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - http = object() - upload = self._make_one(_Stream()) - upload._initialize(http, _Request.URL) - with self.assertRaises(TransferInvalidError): - upload._send_media_body(0) - - def test__send_media_body_start_lt_total_size(self): - from google.cloud.streaming.stream_slice import StreamSlice - - SIZE = 1234 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_media_body(0) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), SIZE) - self.assertEqual(request.headers, - {'content-length': '%d' % (SIZE,), # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) - self.assertEqual(end, SIZE) - - def test__send_media_body_start_eq_total_size(self): - from google.cloud.streaming.stream_slice import StreamSlice - - SIZE = 1234 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_media_body(SIZE) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), 0) - self.assertEqual(request.headers, - {'content-length': '0', # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes */%d' % (SIZE,)}) - self.assertEqual(end, SIZE) - - def test__send_chunk_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - upload = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - upload._send_chunk(0) - - def test__send_chunk_wo_total_size_stream_exhausted(self): - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - http = object() - upload = self._make_one(_Stream(CONTENT), chunksize=1000) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - self.assertIsNone(upload.total_size) - - found = upload._send_chunk(0) - - self.assertIs(found, response) - self.assertEqual(upload.total_size, SIZE) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.body, CONTENT) - self.assertEqual(request.headers, - {'content-length': '%d' % SIZE, # speling! 
- 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) - self.assertEqual(end, SIZE) - - def test__send_chunk_wo_total_size_stream_not_exhausted(self): - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = SIZE - 5 - http = object() - upload = self._make_one(_Stream(CONTENT), chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - self.assertIsNone(upload.total_size) - - found = upload._send_chunk(0) - - self.assertIs(found, response) - self.assertIsNone(upload.total_size) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.body, CONTENT[:CHUNK_SIZE]) - expected_headers = { - 'content-length': '%d' % CHUNK_SIZE, # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/*' % (CHUNK_SIZE - 1,), - } - self.assertEqual(request.headers, expected_headers) - self.assertEqual(end, CHUNK_SIZE) - - def test__send_chunk_w_total_size_stream_not_exhausted(self): - from google.cloud.streaming.stream_slice import StreamSlice - - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = SIZE - 5 - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_chunk(0) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), CHUNK_SIZE) - expected_headers = { - 'content-length': '%d' % CHUNK_SIZE, # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, SIZE), - } - self.assertEqual(request.headers, expected_headers) - self.assertEqual(end, CHUNK_SIZE) - - def test__send_chunk_w_total_size_stream_exhausted(self): - from google.cloud.streaming.stream_slice import StreamSlice - - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = 1000 - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_chunk(SIZE) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), 0) - self.assertEqual(request.headers, - {'content-length': '0', # speling! 
- 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes */%d' % (SIZE,)}) - self.assertEqual(end, SIZE) - - -def _email_chunk_parser(): - import six - - if six.PY3: # pragma: NO COVER Python3 - from email.parser import BytesParser - - parser = BytesParser() - return parser.parsebytes - else: - from email.parser import Parser - - parser = Parser() - return parser.parsestr - - -class _Dummy(object): - def __init__(self, **kw): - self.__dict__.update(kw) - - -class _UploadConfig(object): - accept = ('*/*',) - max_size = None - resumable_path = '/resumable/endpoint' - simple_multipart = True - simple_path = '/upload/endpoint' - - -class _Stream(object): - _closed = False - - def __init__(self, to_read=b''): - import io - - self._written = [] - self._to_read = io.BytesIO(to_read) - - def write(self, to_write): - self._written.append(to_write) - - def seek(self, offset, whence=0): - self._to_read.seek(offset, whence) - - def read(self, size=None): - if size is not None: - return self._to_read.read(size) - return self._to_read.read() - - def tell(self): - return self._to_read.tell() - - def close(self): - self._closed = True - - -class _StreamWithSeekableMethod(_Stream): - - def __init__(self, to_read=b'', seekable=True): - super(_StreamWithSeekableMethod, self).__init__(to_read) - self._seekable = seekable - - def seekable(self): - return self._seekable - - -class _Request(object): - __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body') - URL = 'http://example.com/api' - - def __init__(self, url=URL, http_method='GET', body='', headers=None): - self.url = url - self.http_method = http_method - self.body = self.loggable_body = body - if headers is None: - headers = {} - self.headers = headers - - -class _MakeRequest(object): - - def __init__(self, *responses): - self._responses = list(responses) - self._requested = [] - - def __call__(self, http, request, **kw): - self._requested.append((request, http, kw)) - return self._responses.pop(0) - - -def _makeResponse(status_code, info=None, content='', - request_url=_Request.URL): - if info is None: - info = {} - return _Dummy(status_code=status_code, - info=info, - content=content, - length=len(content), - request_url=request_url) - - -class _MediaStreamer(object): - - _called_with = None - - def __init__(self, response): - self._response = response - - def __call__(self, request, end): - assert self._called_with is None - self._called_with = (request, end) - return self._response diff --git a/packages/google-cloud-core/tests/unit/streaming/test_util.py b/packages/google-cloud-core/tests/unit/streaming/test_util.py deleted file mode 100644 index 4da788182cb9..000000000000 --- a/packages/google-cloud-core/tests/unit/streaming/test_util.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_calculate_wait_for_retry(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.util import calculate_wait_for_retry - - return calculate_wait_for_retry(*args, **kw) - - def test_w_negative_jitter_lt_max_wait(self): - import random - from google.cloud._testing import _Monkey - - with _Monkey(random, uniform=lambda lower, upper: lower): - self.assertEqual(self._call_fut(1), 1.5) - - def test_w_positive_jitter_gt_max_wait(self): - import random - from google.cloud._testing import _Monkey - - with _Monkey(random, uniform=lambda lower, upper: upper): - self.assertEqual(self._call_fut(4), 20) - - -class Test_acceptable_mime_type(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.util import acceptable_mime_type - - return acceptable_mime_type(*args, **kw) - - def test_pattern_wo_slash(self): - with self.assertRaises(ValueError) as err: - self._call_fut(['text/*'], 'BOGUS') - self.assertEqual( - err.exception.args, - ('Invalid MIME type: "BOGUS"',)) - - def test_accept_pattern_w_semicolon(self): - with self.assertRaises(ValueError) as err: - self._call_fut(['text/*;charset=utf-8'], 'text/plain') - self.assertEqual( - err.exception.args, - ('MIME patterns with parameter unsupported: ' - '"text/*;charset=utf-8"',)) - - def test_miss(self): - self.assertFalse(self._call_fut(['image/*'], 'text/plain')) - - def test_hit(self): - self.assertTrue(self._call_fut(['text/*'], 'text/plain')) From 7ac9561baf1c4646fc7c75623c06865cc8f57617 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Sat, 22 Jul 2017 16:38:30 -0700 Subject: [PATCH 159/468] Fix pylint for the main package --- packages/google-cloud-core/.flake8 | 1 + packages/google-cloud-core/google/__init__.py | 2 ++ .../google-cloud-core/google/cloud/__init__.py | 2 ++ .../google-cloud-core/google/cloud/_helpers.py | 5 +++-- packages/google-cloud-core/google/cloud/_http.py | 4 +++- .../google-cloud-core/google/cloud/_testing.py | 14 +++++++++----- packages/google-cloud-core/google/cloud/client.py | 2 +- .../google/cloud/future/operation.py | 2 +- packages/google-cloud-core/google/cloud/iam.py | 6 +++--- .../google-cloud-core/google/cloud/iterator.py | 5 ++++- .../google-cloud-core/google/cloud/operation.py | 4 ++-- packages/google-cloud-core/nox.py | 3 ++- 12 files changed, 33 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-core/.flake8 b/packages/google-cloud-core/.flake8 index 25168dc87605..7f4ddb8072b0 100644 --- a/packages/google-cloud-core/.flake8 +++ b/packages/google-cloud-core/.flake8 @@ -1,4 +1,5 @@ [flake8] +import-order-style=google exclude = __pycache__, .git, diff --git a/packages/google-cloud-core/google/__init__.py b/packages/google-cloud-core/google/__init__.py index b2b833373882..a35569c36339 100644 --- a/packages/google-cloud-core/google/__init__.py +++ b/packages/google-cloud-core/google/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/packages/google-cloud-core/google/cloud/__init__.py b/packages/google-cloud-core/google/cloud/__init__.py index b2b833373882..59a804265f5c 100644 --- a/packages/google-cloud-core/google/cloud/__init__.py +++ b/packages/google-cloud-core/google/cloud/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+"""Google Cloud namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 72918e064507..8dc9bf1cf412 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -17,7 +17,6 @@ This module is not part of the public API surface. """ -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -32,6 +31,8 @@ import google_auth_httplib2 try: + # pylint: disable=ungrouped-imports + # We must import google.auth.transport.grpc within this try: catch. import grpc import google.auth.transport.grpc except ImportError: # pragma: NO COVER @@ -104,7 +105,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. """ - if len(self._stack) > 0: + if self._stack: return self._stack[-1] diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index e1a481e581a7..ada60b4fb2c3 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -279,7 +279,9 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises: Exception if the response code is not 200 OK. + :raises ~google.cloud.exceptions.GoogleCloudError: if the response code + is not 200 OK. + :raises TypeError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index a544fffc5fe4..871b5f631bc7 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -14,17 +14,15 @@ """Shared testing utilities.""" - -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import class _Monkey(object): - # context-manager for replacing module names in the scope of a test. + """Context-manager for replacing module names in the scope of a test.""" def __init__(self, module, **kw): self.module = module - if len(kw) == 0: # pragma: NO COVER + if not kw: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -68,8 +66,12 @@ def _tempdir_mgr(): return _tempdir_mgr +# pylint: disable=invalid-name +# Retain _tempdir as a constant for backwards compatibility despite +# being an invalid name. 
_tempdir = _tempdir_maker() del _tempdir_maker +# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -79,7 +81,8 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - def _make_grpc_error(self, status_code, trailing=None): + @staticmethod + def _make_grpc_error(status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -111,6 +114,7 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): + """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 9bdbf507d201..5fa7f7ef95a2 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises: :class:`TypeError` if there is a conflict with the kwargs + :raises TypeError: if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: diff --git a/packages/google-cloud-core/google/cloud/future/operation.py b/packages/google-cloud-core/google/cloud/future/operation.py index 5bbfda1a8f0b..8064e5c13e1f 100644 --- a/packages/google-cloud-core/google/cloud/future/operation.py +++ b/packages/google-cloud-core/google/cloud/future/operation.py @@ -34,7 +34,7 @@ class Operation(base.PollingFuture): initial operation. refresh (Callable[[], Operation]): A callable that returns the latest state of the operation. - cancel (Callable[[], None]), A callable that tries to cancel + cancel (Callable[[], None]): A callable that tries to cancel the operation. result_type (type): The protobuf type for the operation's result. metadata_type (type): The protobuf type for the operation's diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 49bb11266cee..bbc31c047a85 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -226,14 +226,14 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if len(self._bindings) > 0: + if self._bindings: bindings = resource['bindings'] = [] for role, members in sorted(self._bindings.items()): - if len(members) > 0: + if members: bindings.append( {'role': role, 'members': sorted(set(members))}) - if len(bindings) == 0: + if not bindings: del resource['bindings'] return resource diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 7bb708e90f09..742443ddc5f9 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -242,7 +242,8 @@ def _page_iter(self, increment): results per page while an items iterator will want to increment per item. - Yields :class:`Page` instances. + :rtype: :class:`Page` + :returns: pages """ page = self._next_page() while page is not None: @@ -387,6 +388,8 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. + + :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. 
""" params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 4e700a553e4f..9f53c595f658 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. - :raises: ValueError if a registration already exists for the URL. + :raises ValueError: if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises: :class:`~exceptions.ValueError` if the operation + :raises ValueError: if the operation has already completed. """ if self.complete: diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index c8f4a942e7a2..8f025cce8b61 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -50,7 +50,8 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools') + session.install( + 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') session.run( From a254b8c441c0be10e63e69792e93161a67e37063 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Sat, 22 Jul 2017 16:39:52 -0700 Subject: [PATCH 160/468] Revert "Fix pylint for the main package" - accidental push This reverts commit 028e35b1e76eaf9c88fd87e12f251cbaf4c3a5c1. --- packages/google-cloud-core/.flake8 | 1 - packages/google-cloud-core/google/__init__.py | 2 -- .../google-cloud-core/google/cloud/__init__.py | 2 -- .../google-cloud-core/google/cloud/_helpers.py | 5 ++--- packages/google-cloud-core/google/cloud/_http.py | 4 +--- .../google-cloud-core/google/cloud/_testing.py | 14 +++++--------- packages/google-cloud-core/google/cloud/client.py | 2 +- .../google/cloud/future/operation.py | 2 +- packages/google-cloud-core/google/cloud/iam.py | 6 +++--- .../google-cloud-core/google/cloud/iterator.py | 5 +---- .../google-cloud-core/google/cloud/operation.py | 4 ++-- packages/google-cloud-core/nox.py | 3 +-- 12 files changed, 17 insertions(+), 33 deletions(-) diff --git a/packages/google-cloud-core/.flake8 b/packages/google-cloud-core/.flake8 index 7f4ddb8072b0..25168dc87605 100644 --- a/packages/google-cloud-core/.flake8 +++ b/packages/google-cloud-core/.flake8 @@ -1,5 +1,4 @@ [flake8] -import-order-style=google exclude = __pycache__, .git, diff --git a/packages/google-cloud-core/google/__init__.py b/packages/google-cloud-core/google/__init__.py index a35569c36339..b2b833373882 100644 --- a/packages/google-cloud-core/google/__init__.py +++ b/packages/google-cloud-core/google/__init__.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Google namespace package.""" - try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/packages/google-cloud-core/google/cloud/__init__.py b/packages/google-cloud-core/google/cloud/__init__.py index 59a804265f5c..b2b833373882 100644 --- a/packages/google-cloud-core/google/cloud/__init__.py +++ b/packages/google-cloud-core/google/cloud/__init__.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Google Cloud namespace package.""" - try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 8dc9bf1cf412..72918e064507 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -17,6 +17,7 @@ This module is not part of the public API surface. """ +# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -31,8 +32,6 @@ import google_auth_httplib2 try: - # pylint: disable=ungrouped-imports - # We must import google.auth.transport.grpc within this try: catch. import grpc import google.auth.transport.grpc except ImportError: # pragma: NO COVER @@ -105,7 +104,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. """ - if self._stack: + if len(self._stack) > 0: return self._stack[-1] diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index ada60b4fb2c3..e1a481e581a7 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -279,9 +279,7 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises ~google.cloud.exceptions.GoogleCloudError: if the response code - is not 200 OK. - :raises TypeError: if the response content type is not JSON. + :raises: Exception if the response code is not 200 OK. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index 871b5f631bc7..a544fffc5fe4 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -14,15 +14,17 @@ """Shared testing utilities.""" + +# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import class _Monkey(object): - """Context-manager for replacing module names in the scope of a test.""" + # context-manager for replacing module names in the scope of a test. def __init__(self, module, **kw): self.module = module - if not kw: # pragma: NO COVER + if len(kw) == 0: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -66,12 +68,8 @@ def _tempdir_mgr(): return _tempdir_mgr -# pylint: disable=invalid-name -# Retain _tempdir as a constant for backwards compatibility despite -# being an invalid name. 
_tempdir = _tempdir_maker() del _tempdir_maker -# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -81,8 +79,7 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - @staticmethod - def _make_grpc_error(status_code, trailing=None): + def _make_grpc_error(self, status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -114,7 +111,6 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): - """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 5fa7f7ef95a2..9bdbf507d201 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises TypeError: if there is a conflict with the kwargs + :raises: :class:`TypeError` if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: diff --git a/packages/google-cloud-core/google/cloud/future/operation.py b/packages/google-cloud-core/google/cloud/future/operation.py index 8064e5c13e1f..5bbfda1a8f0b 100644 --- a/packages/google-cloud-core/google/cloud/future/operation.py +++ b/packages/google-cloud-core/google/cloud/future/operation.py @@ -34,7 +34,7 @@ class Operation(base.PollingFuture): initial operation. refresh (Callable[[], Operation]): A callable that returns the latest state of the operation. - cancel (Callable[[], None]): A callable that tries to cancel + cancel (Callable[[], None]), A callable that tries to cancel the operation. result_type (type): The protobuf type for the operation's result. metadata_type (type): The protobuf type for the operation's diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index bbc31c047a85..49bb11266cee 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -226,14 +226,14 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if self._bindings: + if len(self._bindings) > 0: bindings = resource['bindings'] = [] for role, members in sorted(self._bindings.items()): - if members: + if len(members) > 0: bindings.append( {'role': role, 'members': sorted(set(members))}) - if not bindings: + if len(bindings) == 0: del resource['bindings'] return resource diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 742443ddc5f9..7bb708e90f09 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -242,8 +242,7 @@ def _page_iter(self, increment): results per page while an items iterator will want to increment per item. - :rtype: :class:`Page` - :returns: pages + Yields :class:`Page` instances. """ page = self._next_page() while page is not None: @@ -388,8 +387,6 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. - - :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. 
""" params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 9f53c595f658..4e700a553e4f 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. - :raises ValueError: if a registration already exists for the URL. + :raises: ValueError if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises ValueError: if the operation + :raises: :class:`~exceptions.ValueError` if the operation has already completed. """ if self.complete: diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index 8f025cce8b61..c8f4a942e7a2 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -50,8 +50,7 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install( - 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') + session.install('flake8', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') session.run( From 0a28565cc4f51298859f97ca3a0cb3e630b3eb4c Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 14:08:55 -0700 Subject: [PATCH 161/468] Fix pylint for the main package (#3658) --- packages/google-cloud-core/.flake8 | 1 + packages/google-cloud-core/google/__init__.py | 2 ++ .../google-cloud-core/google/cloud/__init__.py | 2 ++ .../google-cloud-core/google/cloud/_helpers.py | 3 +-- packages/google-cloud-core/google/cloud/_http.py | 4 +++- .../google-cloud-core/google/cloud/_testing.py | 14 +++++++++----- packages/google-cloud-core/google/cloud/client.py | 2 +- .../google/cloud/future/operation.py | 2 +- packages/google-cloud-core/google/cloud/iam.py | 6 +++--- .../google-cloud-core/google/cloud/iterator.py | 5 ++++- .../google-cloud-core/google/cloud/operation.py | 4 ++-- packages/google-cloud-core/nox.py | 3 ++- 12 files changed, 31 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-core/.flake8 b/packages/google-cloud-core/.flake8 index 25168dc87605..7f4ddb8072b0 100644 --- a/packages/google-cloud-core/.flake8 +++ b/packages/google-cloud-core/.flake8 @@ -1,4 +1,5 @@ [flake8] +import-order-style=google exclude = __pycache__, .git, diff --git a/packages/google-cloud-core/google/__init__.py b/packages/google-cloud-core/google/__init__.py index b2b833373882..a35569c36339 100644 --- a/packages/google-cloud-core/google/__init__.py +++ b/packages/google-cloud-core/google/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+"""Google namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/packages/google-cloud-core/google/cloud/__init__.py b/packages/google-cloud-core/google/cloud/__init__.py index b2b833373882..59a804265f5c 100644 --- a/packages/google-cloud-core/google/cloud/__init__.py +++ b/packages/google-cloud-core/google/cloud/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google Cloud namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 72918e064507..62bbccf74b15 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -17,7 +17,6 @@ This module is not part of the public API surface. """ -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -104,7 +103,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. """ - if len(self._stack) > 0: + if self._stack: return self._stack[-1] diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index e1a481e581a7..ada60b4fb2c3 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -279,7 +279,9 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises: Exception if the response code is not 200 OK. + :raises ~google.cloud.exceptions.GoogleCloudError: if the response code + is not 200 OK. + :raises TypeError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/packages/google-cloud-core/google/cloud/_testing.py b/packages/google-cloud-core/google/cloud/_testing.py index a544fffc5fe4..871b5f631bc7 100644 --- a/packages/google-cloud-core/google/cloud/_testing.py +++ b/packages/google-cloud-core/google/cloud/_testing.py @@ -14,17 +14,15 @@ """Shared testing utilities.""" - -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import class _Monkey(object): - # context-manager for replacing module names in the scope of a test. + """Context-manager for replacing module names in the scope of a test.""" def __init__(self, module, **kw): self.module = module - if len(kw) == 0: # pragma: NO COVER + if not kw: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -68,8 +66,12 @@ def _tempdir_mgr(): return _tempdir_mgr +# pylint: disable=invalid-name +# Retain _tempdir as a constant for backwards compatibility despite +# being an invalid name. 
_tempdir = _tempdir_maker() del _tempdir_maker +# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -79,7 +81,8 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - def _make_grpc_error(self, status_code, trailing=None): + @staticmethod + def _make_grpc_error(status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -111,6 +114,7 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): + """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 9bdbf507d201..5fa7f7ef95a2 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises: :class:`TypeError` if there is a conflict with the kwargs + :raises TypeError: if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: diff --git a/packages/google-cloud-core/google/cloud/future/operation.py b/packages/google-cloud-core/google/cloud/future/operation.py index 5bbfda1a8f0b..8064e5c13e1f 100644 --- a/packages/google-cloud-core/google/cloud/future/operation.py +++ b/packages/google-cloud-core/google/cloud/future/operation.py @@ -34,7 +34,7 @@ class Operation(base.PollingFuture): initial operation. refresh (Callable[[], Operation]): A callable that returns the latest state of the operation. - cancel (Callable[[], None]), A callable that tries to cancel + cancel (Callable[[], None]): A callable that tries to cancel the operation. result_type (type): The protobuf type for the operation's result. metadata_type (type): The protobuf type for the operation's diff --git a/packages/google-cloud-core/google/cloud/iam.py b/packages/google-cloud-core/google/cloud/iam.py index 49bb11266cee..bbc31c047a85 100644 --- a/packages/google-cloud-core/google/cloud/iam.py +++ b/packages/google-cloud-core/google/cloud/iam.py @@ -226,14 +226,14 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if len(self._bindings) > 0: + if self._bindings: bindings = resource['bindings'] = [] for role, members in sorted(self._bindings.items()): - if len(members) > 0: + if members: bindings.append( {'role': role, 'members': sorted(set(members))}) - if len(bindings) == 0: + if not bindings: del resource['bindings'] return resource diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/cloud/iterator.py index 7bb708e90f09..742443ddc5f9 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/cloud/iterator.py @@ -242,7 +242,8 @@ def _page_iter(self, increment): results per page while an items iterator will want to increment per item. - Yields :class:`Page` instances. + :rtype: :class:`Page` + :returns: pages """ page = self._next_page() while page is not None: @@ -387,6 +388,8 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. + + :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. 
""" params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/packages/google-cloud-core/google/cloud/operation.py b/packages/google-cloud-core/google/cloud/operation.py index 4e700a553e4f..9f53c595f658 100644 --- a/packages/google-cloud-core/google/cloud/operation.py +++ b/packages/google-cloud-core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. - :raises: ValueError if a registration already exists for the URL. + :raises ValueError: if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises: :class:`~exceptions.ValueError` if the operation + :raises ValueError: if the operation has already completed. """ if self.complete: diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index c8f4a942e7a2..8f025cce8b61 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -50,7 +50,8 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools') + session.install( + 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') session.run( From 75551bcbc12c92bf9a821cdccd246f86ae36594d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 15:29:38 -0700 Subject: [PATCH 162/468] Split polling future into its own module (#3662) --- .../google/cloud/future/base.py | 149 --------------- .../google/cloud/future/operation.py | 4 +- .../google/cloud/future/polling.py | 169 ++++++++++++++++++ .../tests/unit/future/test_operation.py | 2 +- .../future/{test_base.py => test_polling.py} | 4 +- 5 files changed, 174 insertions(+), 154 deletions(-) create mode 100644 packages/google-cloud-core/google/cloud/future/polling.py rename packages/google-cloud-core/tests/unit/future/{test_base.py => test_polling.py} (97%) diff --git a/packages/google-cloud-core/google/cloud/future/base.py b/packages/google-cloud-core/google/cloud/future/base.py index aed1dfd80e5d..243913640d62 100644 --- a/packages/google-cloud-core/google/cloud/future/base.py +++ b/packages/google-cloud-core/google/cloud/future/base.py @@ -15,14 +15,8 @@ """Abstract and helper bases for Future implementations.""" import abc -import concurrent.futures -import functools -import operator import six -import tenacity - -from google.cloud.future import _helpers @six.add_metaclass(abc.ABCMeta) @@ -71,146 +65,3 @@ def set_result(self, result): @abc.abstractmethod def set_exception(self, exception): raise NotImplementedError() - - -class PollingFuture(Future): - """A Future that needs to poll some service to check its status. - - The :meth:`done` method should be implemented by subclasses. The polling - behavior will repeatedly call ``done`` until it returns True. - - .. note: Privacy here is intended to prevent the final class from - overexposing, not to prevent subclasses from accessing methods. 
- """ - def __init__(self): - super(PollingFuture, self).__init__() - self._result = None - self._exception = None - self._result_set = False - """bool: Set to True when the result has been set via set_result or - set_exception.""" - self._polling_thread = None - self._done_callbacks = [] - - @abc.abstractmethod - def done(self): - """Checks to see if the operation is complete. - - Returns: - bool: True if the operation is complete, False otherwise. - """ - # pylint: disable=redundant-returns-doc, missing-raises-doc - raise NotImplementedError() - - def running(self): - """True if the operation is currently running.""" - return not self.done() - - def _blocking_poll(self, timeout=None): - """Poll and wait for the Future to be resolved. - - Args: - timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. - """ - if self._result_set: - return - - retry_on = tenacity.retry_if_result( - functools.partial(operator.is_not, True)) - # Use exponential backoff with jitter. - wait_on = ( - tenacity.wait_exponential(multiplier=1, max=10) + - tenacity.wait_random(0, 1)) - - if timeout is None: - retry = tenacity.retry(retry=retry_on, wait=wait_on) - else: - retry = tenacity.retry( - retry=retry_on, - wait=wait_on, - stop=tenacity.stop_after_delay(timeout)) - - try: - retry(self.done)() - except tenacity.RetryError as exc: - six.raise_from( - concurrent.futures.TimeoutError( - 'Operation did not complete within the designated ' - 'timeout.'), - exc) - - def result(self, timeout=None): - """Get the result of the operation, blocking if necessary. - - Args: - timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. - - Returns: - google.protobuf.Message: The Operation's result. - - Raises: - google.gax.GaxError: If the operation errors or if the timeout is - reached before the operation completes. - """ - self._blocking_poll(timeout=timeout) - - if self._exception is not None: - # pylint: disable=raising-bad-type - # Pylint doesn't recognize that this is valid in this case. - raise self._exception - - return self._result - - def exception(self, timeout=None): - """Get the exception from the operation, blocking if necessary. - - Args: - timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. - - Returns: - Optional[google.gax.GaxError]: The operation's error. - """ - self._blocking_poll() - return self._exception - - def add_done_callback(self, fn): - """Add a callback to be executed when the operation is complete. - - If the operation is not already complete, this will start a helper - thread to poll for the status of the operation in the background. - - Args: - fn (Callable[Future]): The callback to execute when the operation - is complete. - """ - if self._result_set: - _helpers.safe_invoke_callback(fn, self) - return - - self._done_callbacks.append(fn) - - if self._polling_thread is None: - # The polling thread will exit on its own as soon as the operation - # is done. 
- self._polling_thread = _helpers.start_daemon_thread( - target=self._blocking_poll) - - def _invoke_callbacks(self, *args, **kwargs): - """Invoke all done callbacks.""" - for callback in self._done_callbacks: - _helpers.safe_invoke_callback(callback, *args, **kwargs) - - def set_result(self, result): - """Set the Future's result.""" - self._result = result - self._result_set = True - self._invoke_callbacks(self) - - def set_exception(self, exception): - """Set the Future's exception.""" - self._exception = exception - self._result_set = True - self._invoke_callbacks(self) diff --git a/packages/google-cloud-core/google/cloud/future/operation.py b/packages/google-cloud-core/google/cloud/future/operation.py index 8064e5c13e1f..21da738ca0ff 100644 --- a/packages/google-cloud-core/google/cloud/future/operation.py +++ b/packages/google-cloud-core/google/cloud/future/operation.py @@ -23,10 +23,10 @@ from google.cloud import _helpers from google.cloud import exceptions -from google.cloud.future import base +from google.cloud.future import polling -class Operation(base.PollingFuture): +class Operation(polling.PollingFuture): """A Future for interacting with a Google API Long-Running Operation. Args: diff --git a/packages/google-cloud-core/google/cloud/future/polling.py b/packages/google-cloud-core/google/cloud/future/polling.py new file mode 100644 index 000000000000..6b7ae4221f64 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/future/polling.py @@ -0,0 +1,169 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstract and helper bases for Future implementations.""" + +import abc +import concurrent.futures +import functools +import operator + +import six +import tenacity + +from google.cloud.future import _helpers +from google.cloud.future import base + + +class PollingFuture(base.Future): + """A Future that needs to poll some service to check its status. + + The :meth:`done` method should be implemented by subclasses. The polling + behavior will repeatedly call ``done`` until it returns True. + + .. note: Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods. + """ + def __init__(self): + super(PollingFuture, self).__init__() + self._result = None + self._exception = None + self._result_set = False + """bool: Set to True when the result has been set via set_result or + set_exception.""" + self._polling_thread = None + self._done_callbacks = [] + + @abc.abstractmethod + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. + """ + # pylint: disable=redundant-returns-doc, missing-raises-doc + raise NotImplementedError() + + def running(self): + """True if the operation is currently running.""" + return not self.done() + + def _blocking_poll(self, timeout=None): + """Poll and wait for the Future to be resolved. + + Args: + timeout (int): How long to wait for the operation to complete. 
+ If None, wait indefinitely. + """ + if self._result_set: + return + + retry_on = tenacity.retry_if_result( + functools.partial(operator.is_not, True)) + # Use exponential backoff with jitter. + wait_on = ( + tenacity.wait_exponential(multiplier=1, max=10) + + tenacity.wait_random(0, 1)) + + if timeout is None: + retry = tenacity.retry(retry=retry_on, wait=wait_on) + else: + retry = tenacity.retry( + retry=retry_on, + wait=wait_on, + stop=tenacity.stop_after_delay(timeout)) + + try: + retry(self.done)() + except tenacity.RetryError as exc: + six.raise_from( + concurrent.futures.TimeoutError( + 'Operation did not complete within the designated ' + 'timeout.'), + exc) + + def result(self, timeout=None): + """Get the result of the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + google.protobuf.Message: The Operation's result. + + Raises: + google.gax.GaxError: If the operation errors or if the timeout is + reached before the operation completes. + """ + self._blocking_poll(timeout=timeout) + + if self._exception is not None: + # pylint: disable=raising-bad-type + # Pylint doesn't recognize that this is valid in this case. + raise self._exception + + return self._result + + def exception(self, timeout=None): + """Get the exception from the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + Optional[google.gax.GaxError]: The operation's error. + """ + self._blocking_poll() + return self._exception + + def add_done_callback(self, fn): + """Add a callback to be executed when the operation is complete. + + If the operation is not already complete, this will start a helper + thread to poll for the status of the operation in the background. + + Args: + fn (Callable[Future]): The callback to execute when the operation + is complete. + """ + if self._result_set: + _helpers.safe_invoke_callback(fn, self) + return + + self._done_callbacks.append(fn) + + if self._polling_thread is None: + # The polling thread will exit on its own as soon as the operation + # is done. 
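# A standalone sketch of the tenacity pattern that _blocking_poll uses above:
# retry a done-style check while it returns anything other than True, wait
# with exponential backoff plus jitter, and give up after an overall deadline.
# The check_done callable and the 30-second default are assumed here for
# illustration only.
import functools
import operator

import tenacity


def wait_until_done(check_done, timeout=30):
    retry = tenacity.retry(
        retry=tenacity.retry_if_result(
            functools.partial(operator.is_not, True)),
        wait=(tenacity.wait_exponential(multiplier=1, max=10) +
              tenacity.wait_random(0, 1)),
        stop=tenacity.stop_after_delay(timeout))
    try:
        return retry(check_done)()
    except tenacity.RetryError:
        raise RuntimeError(
            'operation did not complete within %d seconds' % timeout)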
+ self._polling_thread = _helpers.start_daemon_thread( + target=self._blocking_poll) + + def _invoke_callbacks(self, *args, **kwargs): + """Invoke all done callbacks.""" + for callback in self._done_callbacks: + _helpers.safe_invoke_callback(callback, *args, **kwargs) + + def set_result(self, result): + """Set the Future's result.""" + self._result = result + self._result_set = True + self._invoke_callbacks(self) + + def set_exception(self, exception): + """Set the Future's exception.""" + self._exception = exception + self._result_set = True + self._invoke_callbacks(self) diff --git a/packages/google-cloud-core/tests/unit/future/test_operation.py b/packages/google-cloud-core/tests/unit/future/test_operation.py index 0e29aa687ee6..2d281694001a 100644 --- a/packages/google-cloud-core/tests/unit/future/test_operation.py +++ b/packages/google-cloud-core/tests/unit/future/test_operation.py @@ -61,7 +61,7 @@ def make_operation_future(client_operations_responses=None): def test_constructor(): - future, refresh, cancel = make_operation_future() + future, refresh, _ = make_operation_future() assert future.operation == refresh.responses[0] assert future.operation.done is False diff --git a/packages/google-cloud-core/tests/unit/future/test_base.py b/packages/google-cloud-core/tests/unit/future/test_polling.py similarity index 97% rename from packages/google-cloud-core/tests/unit/future/test_base.py rename to packages/google-cloud-core/tests/unit/future/test_polling.py index 69a0348e68d9..c8fde1c20385 100644 --- a/packages/google-cloud-core/tests/unit/future/test_base.py +++ b/packages/google-cloud-core/tests/unit/future/test_polling.py @@ -19,10 +19,10 @@ import mock import pytest -from google.cloud.future import base +from google.cloud.future import polling -class PollingFutureImpl(base.PollingFuture): +class PollingFutureImpl(polling.PollingFuture): def done(self): return False From f34bc9c228ad1fd8514e0f70a2eb8bb5a6781dca Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 16:09:58 -0700 Subject: [PATCH 163/468] Re-enable flake8 for core package and tests (#3664) --- packages/google-cloud-core/.flake8 | 3 +++ packages/google-cloud-core/google/cloud/_helpers.py | 10 +++++----- packages/google-cloud-core/google/cloud/_http.py | 2 +- packages/google-cloud-core/google/cloud/client.py | 4 ++-- .../google-cloud-core/google/cloud/credentials.py | 6 +++--- packages/google-cloud-core/google/cloud/exceptions.py | 5 +++-- .../google/cloud/future/operation.py | 7 +++---- packages/google-cloud-core/nox.py | 2 +- .../google-cloud-core/tests/unit/test_credentials.py | 11 +++++------ packages/google-cloud-core/tests/unit/test_iam.py | 1 - 10 files changed, 26 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-core/.flake8 b/packages/google-cloud-core/.flake8 index 7f4ddb8072b0..3db9b737d6bc 100644 --- a/packages/google-cloud-core/.flake8 +++ b/packages/google-cloud-core/.flake8 @@ -1,5 +1,8 @@ [flake8] import-order-style=google +# Note: this forces all google imports to be in the third group. 
See +# https://github.com/PyCQA/flake8-import-order/issues/111 +application-import-names=google exclude = __pycache__, .git, diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index 62bbccf74b15..fdb22ecdf09c 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -25,10 +25,14 @@ import re from threading import local as Local +import google_auth_httplib2 +import httplib2 +import six +from six.moves import http_client + import google.auth from google.protobuf import duration_pb2 from google.protobuf import timestamp_pb2 -import google_auth_httplib2 try: import grpc @@ -36,10 +40,6 @@ except ImportError: # pragma: NO COVER grpc = None -import httplib2 -import six -from six.moves import http_client - _NOW = datetime.datetime.utcnow # To be replaced by tests. _RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index ada60b4fb2c3..186d6216e7eb 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -16,8 +16,8 @@ import json import platform -from pkg_resources import get_distribution +from pkg_resources import get_distribution import six from six.moves.urllib.parse import urlencode diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 5fa7f7ef95a2..5906ab5ed108 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -18,13 +18,13 @@ import json from pickle import PicklingError -import google.auth.credentials -from google.oauth2 import service_account import google_auth_httplib2 import six +import google.auth.credentials from google.cloud._helpers import _determine_default_project from google.cloud.credentials import get_credentials +from google.oauth2 import service_account _GOOGLE_AUTH_CREDENTIALS_HELP = ( diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index e5fe30245ea5..29c4a5d310f4 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -16,15 +16,15 @@ import base64 import datetime + import six from six.moves.urllib.parse import urlencode import google.auth import google.auth.credentials - -from google.cloud._helpers import UTC -from google.cloud._helpers import _NOW from google.cloud._helpers import _microseconds_from_datetime +from google.cloud._helpers import _NOW +from google.cloud._helpers import UTC def get_credentials(): diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index 32080de7ff50..e911980c6328 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -22,17 +22,18 @@ import copy import json + import six from google.cloud._helpers import _to_bytes -_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module - try: from grpc._channel import _Rendezvous except ImportError: # pragma: NO COVER _Rendezvous = None +_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module + # pylint: disable=invalid-name GrpcRendezvous = _Rendezvous diff --git a/packages/google-cloud-core/google/cloud/future/operation.py 
b/packages/google-cloud-core/google/cloud/future/operation.py index 21da738ca0ff..ec430cd9c55b 100644 --- a/packages/google-cloud-core/google/cloud/future/operation.py +++ b/packages/google-cloud-core/google/cloud/future/operation.py @@ -17,13 +17,12 @@ import functools import threading -from google.longrunning import operations_pb2 -from google.protobuf import json_format -from google.rpc import code_pb2 - from google.cloud import _helpers from google.cloud import exceptions from google.cloud.future import polling +from google.longrunning import operations_pb2 +from google.protobuf import json_format +from google.rpc import code_pb2 class Operation(polling.PollingFuture): diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index 8f025cce8b61..48b55332283e 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -53,7 +53,7 @@ def lint(session): session.install( 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') - session.run('flake8', 'google/cloud/core') + session.run('flake8', 'google', 'tests') session.run( 'gcp-devrel-py-tools', 'run-pylint', '--config', 'pylint.config.py', diff --git a/packages/google-cloud-core/tests/unit/test_credentials.py b/packages/google-cloud-core/tests/unit/test_credentials.py index 53370a061494..aaffa907dda1 100644 --- a/packages/google-cloud-core/tests/unit/test_credentials.py +++ b/packages/google-cloud-core/tests/unit/test_credentials.py @@ -15,6 +15,7 @@ import unittest import mock +import six class Test_get_credentials(unittest.TestCase): @@ -169,12 +170,10 @@ def test_w_int(self): self.assertEqual(self._call_fut(123), 123) def test_w_long(self): - try: - long - except NameError: # pragma: NO COVER Py3K - pass - else: - self.assertEqual(self._call_fut(long(123)), 123) + if six.PY3: + raise unittest.SkipTest('No long on Python 3') + + self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 def test_w_naive_datetime(self): import datetime diff --git a/packages/google-cloud-core/tests/unit/test_iam.py b/packages/google-cloud-core/tests/unit/test_iam.py index d076edd6eba9..4a17c61ce173 100644 --- a/packages/google-cloud-core/tests/unit/test_iam.py +++ b/packages/google-cloud-core/tests/unit/test_iam.py @@ -200,7 +200,6 @@ def test_from_api_repr_complete(self): {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } - empty = frozenset() klass = self._get_target_class() policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') From 1232f7f4fd3a1fc49b08c67a89658c226ee75cef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isra=C3=ABl=20Hall=C3=A9?= Date: Tue, 25 Jul 2017 14:10:47 -0400 Subject: [PATCH 164/468] Correctly url-encode list parameters (#3657) --- .../google-cloud-core/google/cloud/_http.py | 2 +- .../tests/unit/test__http.py | 24 ++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index 186d6216e7eb..b7c17ca91d6d 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -135,7 +135,7 @@ def build_api_url(cls, path, query_params=None, query_params = query_params or {} if query_params: - url += '?' + urlencode(query_params) + url += '?' 
+ urlencode(query_params, doseq=True) return url diff --git a/packages/google-cloud-core/tests/unit/test__http.py b/packages/google-cloud-core/tests/unit/test__http.py index 1226042b5859..22df11566811 100644 --- a/packages/google-cloud-core/tests/unit/test__http.py +++ b/packages/google-cloud-core/tests/unit/test__http.py @@ -94,12 +94,15 @@ def test_build_api_url_no_extra_query_params(self): self.assertEqual(conn.build_api_url('/foo'), URI) def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit client = object() conn = self._make_mock_one(client) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) + uri = conn.build_api_url('/foo', { + 'bar': 'baz', + 'qux': ['quux', 'corge'] + }) scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) @@ -111,8 +114,9 @@ def test_build_api_url_w_extra_query_params(self): 'foo', ]) self.assertEqual(path, PATH) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') + parms = dict(parse_qs(qs)) + self.assertEqual(parms['bar'], ['baz']) + self.assertEqual(parms['qux'], ['quux', 'corge']) def test__make_request_no_data_no_content_type_no_headers(self): http = _Http( @@ -222,7 +226,7 @@ def test_api_request_wo_json_expected(self): b'CONTENT') def test_api_request_w_query_params(self): - from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit http = _Http( @@ -231,7 +235,10 @@ def test_api_request_w_query_params(self): ) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) + self.assertEqual(conn.api_request('GET', '/', { + 'foo': 'bar', + 'baz': ['qux', 'quux'] + }), {}) self.assertEqual(http._called_with['method'], 'GET') uri = http._called_with['uri'] scheme, netloc, path, qs, _ = urlsplit(uri) @@ -244,8 +251,9 @@ def test_api_request_w_query_params(self): '', ]) self.assertEqual(path, PATH) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['foo'], 'bar') + parms = dict(parse_qs(qs)) + self.assertEqual(parms['foo'], ['bar']) + self.assertEqual(parms['baz'], ['qux', 'quux']) self.assertIsNone(http._called_with['body']) expected_headers = { 'Accept-Encoding': 'gzip', From 5c753e009b21de58c07b9b2fe4c12ffe1c82dd21 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Jul 2017 13:10:24 -0700 Subject: [PATCH 165/468] Moving bytes signing helpers from `core` to `storage`. 
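# A quick illustration of what the doseq=True change in this commit does:
# sequence values become repeated query parameters instead of the
# percent-encoded str() of the list (the parameter names echo the test above).
from six.moves.urllib.parse import urlencode

params = {'foo': 'bar', 'qux': ['quux', 'corge']}
print(urlencode(params, doseq=True))
# e.g. foo=bar&qux=quux&qux=corge (key order may vary with dict ordering)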
(#3668) --- .../google/cloud/credentials.py | 173 --------------- .../tests/unit/test_credentials.py | 199 ------------------ 2 files changed, 372 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py index 29c4a5d310f4..b434cac2f1e7 100644 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ b/packages/google-cloud-core/google/cloud/credentials.py @@ -14,17 +14,7 @@ """A simple wrapper around the OAuth2 credentials library.""" -import base64 -import datetime - -import six -from six.moves.urllib.parse import urlencode - import google.auth -import google.auth.credentials -from google.cloud._helpers import _microseconds_from_datetime -from google.cloud._helpers import _NOW -from google.cloud._helpers import UTC def get_credentials(): @@ -38,166 +28,3 @@ def get_credentials(): """ credentials, _ = google.auth.default() return credentials - - -def _get_signed_query_params(credentials, expiration, string_to_sign): - """Gets query parameters for creating a signed URL. - - :type credentials: :class:`google.auth.credentials.Signer` - :param credentials: The credentials used to create a private key - for signing text. - - :type expiration: int or long - :param expiration: When the signed URL should expire. - - :type string_to_sign: str - :param string_to_sign: The string to be signed by the credentials. - - :raises AttributeError: If :meth: sign_blob is unavailable. - - :rtype: dict - :returns: Query parameters matching the signing credentials with a - signed payload. - """ - if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' - 'core/auth.html?highlight=authentication#setting-up-' - 'a-service-account') - raise AttributeError('you need a private key to sign credentials.' - 'the credentials you are currently using %s ' - 'just contains a token. see %s for more ' - 'details.' % (type(credentials), auth_uri)) - - signature_bytes = credentials.sign_bytes(string_to_sign) - signature = base64.b64encode(signature_bytes) - service_account_name = credentials.signer_email - return { - 'GoogleAccessId': service_account_name, - 'Expires': str(expiration), - 'Signature': signature, - } - - -def _get_expiration_seconds(expiration): - """Convert 'expiration' to a number of seconds in the future. - - :type expiration: int, long, datetime.datetime, datetime.timedelta - :param expiration: When the signed URL should expire. - - :raises TypeError: When expiration is not an integer. - - :rtype: int - :returns: a timestamp as an absolute number of seconds. - """ - # If it's a timedelta, add it to `now` in UTC. - if isinstance(expiration, datetime.timedelta): - now = _NOW().replace(tzinfo=UTC) - expiration = now + expiration - - # If it's a datetime, convert to a timestamp. - if isinstance(expiration, datetime.datetime): - micros = _microseconds_from_datetime(expiration) - expiration = micros // 10**6 - - if not isinstance(expiration, six.integer_types): - raise TypeError('Expected an integer timestamp, datetime, or ' - 'timedelta. Got %s' % type(expiration)) - return expiration - - -def generate_signed_url(credentials, resource, expiration, - api_access_endpoint='', - method='GET', content_md5=None, - content_type=None, response_type=None, - response_disposition=None, generation=None): - """Generate signed URL to provide query-string auth'n to a resource. - - .. 
note:: - - Assumes ``credentials`` implements the - :class:`google.auth.credentials.Signing` interface. Also assumes - ``credentials`` has a ``service_account_email`` property which - identifies the credentials. - - .. note:: - - If you are on Google Compute Engine, you can't generate a signed URL. - Follow `Issue 922`_ for updates on this. If you'd like to be able to - generate a signed URL from GCE, you can use a standard service account - from a JSON file rather than a GCE service account. - - See headers `reference`_ for more details on optional arguments. - - .. _Issue 922: https://github.com/GoogleCloudPlatform/\ - google-cloud-python/issues/922 - .. _reference: https://cloud.google.com/storage/docs/reference-headers - - :type credentials: :class:`google.auth.credentials.Signing` - :param credentials: Credentials object with an associated private key to - sign text. - - :type resource: str - :param resource: A pointer to a specific resource - (typically, ``/bucket-name/path/to/blob.txt``). - - :type expiration: :class:`int`, :class:`long`, :class:`datetime.datetime`, - :class:`datetime.timedelta` - :param expiration: When the signed URL should expire. - - :type api_access_endpoint: str - :param api_access_endpoint: Optional URI base. Defaults to empty string. - - :type method: str - :param method: The HTTP verb that will be used when requesting the URL. - Defaults to ``'GET'``. - - :type content_md5: str - :param content_md5: (Optional) The MD5 hash of the object referenced by - ``resource``. - - :type content_type: str - :param content_type: (Optional) The content type of the object referenced - by ``resource``. - - :type response_type: str - :param response_type: (Optional) Content type of responses to requests for - the signed URL. Used to over-ride the content type of - the underlying resource. - - :type response_disposition: str - :param response_disposition: (Optional) Content disposition of responses to - requests for the signed URL. - - :type generation: str - :param generation: (Optional) A value that indicates which generation of - the resource to fetch. - - :rtype: str - :returns: A signed URL you can use to access the resource - until expiration. - """ - expiration = _get_expiration_seconds(expiration) - - # Generate the string to sign. - string_to_sign = '\n'.join([ - method, - content_md5 or '', - content_type or '', - str(expiration), - resource]) - - # Set the right query parameters. - query_params = _get_signed_query_params(credentials, - expiration, - string_to_sign) - if response_type is not None: - query_params['response-content-type'] = response_type - if response_disposition is not None: - query_params['response-content-disposition'] = response_disposition - if generation is not None: - query_params['generation'] = generation - - # Return the built URL. 
- return '{endpoint}{resource}?{querystring}'.format( - endpoint=api_access_endpoint, resource=resource, - querystring=urlencode(query_params)) diff --git a/packages/google-cloud-core/tests/unit/test_credentials.py b/packages/google-cloud-core/tests/unit/test_credentials.py index aaffa907dda1..3b313c1dc1d6 100644 --- a/packages/google-cloud-core/tests/unit/test_credentials.py +++ b/packages/google-cloud-core/tests/unit/test_credentials.py @@ -15,7 +15,6 @@ import unittest import mock -import six class Test_get_credentials(unittest.TestCase): @@ -33,201 +32,3 @@ def test_it(self): self.assertIs(found, mock.sentinel.credentials) default.assert_called_once_with() - - -class Test_generate_signed_url(unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.credentials import generate_signed_url - - return generate_signed_url(*args, **kwargs) - - def _generate_helper(self, response_type=None, response_disposition=None, - generation=None): - import base64 - from six.moves.urllib.parse import parse_qs - from six.moves.urllib.parse import urlsplit - import google.auth.credentials - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - ENDPOINT = 'http://api.example.com' - RESOURCE = '/name/path' - SIGNED = base64.b64encode(b'DEADBEEF') - CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) - CREDENTIALS.signer_email = 'service@example.com' - - def _get_signed_query_params(*args): - credentials, expiration = args[:2] - return { - 'GoogleAccessId': credentials.signer_email, - 'Expires': str(expiration), - 'Signature': SIGNED, - } - - with _Monkey(MUT, _get_signed_query_params=_get_signed_query_params): - url = self._call_fut(CREDENTIALS, RESOURCE, 1000, - api_access_endpoint=ENDPOINT, - response_type=response_type, - response_disposition=response_disposition, - generation=generation) - - scheme, netloc, path, qs, frag = urlsplit(url) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'api.example.com') - self.assertEqual(path, RESOURCE) - params = parse_qs(qs) - # In Py3k, parse_qs gives us text values: - self.assertEqual(params.pop('Signature'), [SIGNED.decode('ascii')]) - self.assertEqual(params.pop('Expires'), ['1000']) - self.assertEqual(params.pop('GoogleAccessId'), - [CREDENTIALS.signer_email]) - if response_type is not None: - self.assertEqual(params.pop('response-content-type'), - [response_type]) - if response_disposition is not None: - self.assertEqual(params.pop('response-content-disposition'), - [response_disposition]) - if generation is not None: - self.assertEqual(params.pop('generation'), [generation]) - # Make sure we have checked them all. 
- self.assertEqual(len(params), 0) - self.assertEqual(frag, '') - - def test_w_expiration_int(self): - self._generate_helper() - - def test_w_custom_fields(self): - response_type = 'text/plain' - response_disposition = 'attachment; filename=blob.png' - generation = '123' - self._generate_helper(response_type=response_type, - response_disposition=response_disposition, - generation=generation) - - -class Test_generate_signed_url_exception(unittest.TestCase): - def test_with_google_credentials(self): - import time - import google.auth.credentials - from google.cloud.credentials import generate_signed_url - - RESOURCE = '/name/path' - - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - expiration = int(time.time() + 5) - self.assertRaises(AttributeError, generate_signed_url, credentials, - resource=RESOURCE, expiration=expiration) - - -class Test__get_signed_query_params(unittest.TestCase): - - def _call_fut(self, credentials, expiration, string_to_sign): - from google.cloud.credentials import _get_signed_query_params - - return _get_signed_query_params(credentials, expiration, - string_to_sign) - - def test_it(self): - import base64 - import google.auth.credentials - - SIG_BYTES = b'DEADBEEF' - ACCOUNT_NAME = mock.sentinel.service_account_email - CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) - CREDENTIALS.signer_email = ACCOUNT_NAME - CREDENTIALS.sign_bytes.return_value = SIG_BYTES - EXPIRATION = 100 - STRING_TO_SIGN = 'dummy_signature' - result = self._call_fut(CREDENTIALS, EXPIRATION, - STRING_TO_SIGN) - - self.assertEqual(result, { - 'GoogleAccessId': ACCOUNT_NAME, - 'Expires': str(EXPIRATION), - 'Signature': base64.b64encode(b'DEADBEEF'), - }) - CREDENTIALS.sign_bytes.assert_called_once_with(STRING_TO_SIGN) - - -class Test__get_expiration_seconds(unittest.TestCase): - - def _call_fut(self, expiration): - from google.cloud.credentials import _get_expiration_seconds - - return _get_expiration_seconds(expiration) - - def _utc_seconds(self, when): - import calendar - - return int(calendar.timegm(when.timetuple())) - - def test_w_invalid(self): - self.assertRaises(TypeError, self._call_fut, object()) - self.assertRaises(TypeError, self._call_fut, None) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), 123) - - def test_w_long(self): - if six.PY3: - raise unittest.SkipTest('No long on Python 3') - - self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 - - def test_w_naive_datetime(self): - import datetime - - expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(expiration_no_tz) - self.assertEqual(self._call_fut(expiration_no_tz), utc_seconds) - - def test_w_utc_datetime(self): - import datetime - from google.cloud._helpers import UTC - - expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) - utc_seconds = self._utc_seconds(expiration_utc) - self.assertEqual(self._call_fut(expiration_utc), utc_seconds) - - def test_w_other_zone_datetime(self): - import datetime - from google.cloud._helpers import _UTC - - class CET(_UTC): - _tzname = 'CET' - _utcoffset = datetime.timedelta(hours=1) - - zone = CET() - expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) - utc_seconds = self._utc_seconds(expiration_other) - cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC - self.assertEqual(self._call_fut(expiration_other), cet_seconds) - - def test_w_timedelta_seconds(self): - import datetime - from google.cloud._testing import _Monkey - from google.cloud import 
credentials as MUT - - dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(dummy_utcnow) - expiration_as_delta = datetime.timedelta(seconds=10) - - with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._call_fut(expiration_as_delta) - - self.assertEqual(result, utc_seconds + 10) - - def test_w_timedelta_days(self): - import datetime - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(dummy_utcnow) - expiration_as_delta = datetime.timedelta(days=1) - - with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._call_fut(expiration_as_delta) - - self.assertEqual(result, utc_seconds + 86400) From 21f1fb473afe124d53679bf5a3017a658e428e65 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Jul 2017 14:13:44 -0700 Subject: [PATCH 166/468] Removing `get_credentials()` from `core`. (#3667) * Removing `get_credentials()` from `core`. In the process also: - Slight re-org on `nox.py` config (to pass posargs) for `core` and `datastore` - Getting rid of last usage of `_Monkey` in datastore This is part of `@jonparrott`'s effort to slim down / stabilize `core`. * Removing `google.cloud.credentials` module from docs. --- .../google-cloud-core/google/cloud/client.py | 4 +- .../google/cloud/credentials.py | 30 ------- packages/google-cloud-core/nox.py | 23 +++-- .../tests/unit/test_client.py | 83 ++++++++----------- .../tests/unit/test_credentials.py | 34 -------- 5 files changed, 54 insertions(+), 120 deletions(-) delete mode 100644 packages/google-cloud-core/google/cloud/credentials.py delete mode 100644 packages/google-cloud-core/tests/unit/test_credentials.py diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 5906ab5ed108..468cf9e40a52 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -21,9 +21,9 @@ import google_auth_httplib2 import six +import google.auth import google.auth.credentials from google.cloud._helpers import _determine_default_project -from google.cloud.credentials import get_credentials from google.oauth2 import service_account @@ -135,7 +135,7 @@ def __init__(self, credentials=None, _http=None): credentials, google.auth.credentials.Credentials)): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and _http is None: - credentials = get_credentials() + credentials, _ = google.auth.default() self._credentials = google.auth.credentials.with_scopes_if_required( credentials, self.SCOPE) self._http_internal = _http diff --git a/packages/google-cloud-core/google/cloud/credentials.py b/packages/google-cloud-core/google/cloud/credentials.py deleted file mode 100644 index b434cac2f1e7..000000000000 --- a/packages/google-cloud-core/google/cloud/credentials.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2014 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""A simple wrapper around the OAuth2 credentials library.""" - -import google.auth - - -def get_credentials(): - """Gets credentials implicitly from the current environment. - - Uses :func:`google.auth.default()`. - - :rtype: :class:`google.auth.credentials.Credentials`, - :returns: A new credentials instance corresponding to the implicit - environment. - """ - credentials, _ = google.auth.default() - return credentials diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index 48b55332283e..1dca10eb9b69 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -13,6 +13,7 @@ # limitations under the License. from __future__ import absolute_import +import os import nox @@ -29,16 +30,26 @@ def unit_tests(session, python_version): session.virtualenv_dirname = 'unit-' + python_version # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', - 'grpcio >= 1.0.2') + session.install( + 'mock', + 'pytest', + 'pytest-cov', + 'grpcio >= 1.0.2', + ) session.install('-e', '.') # Run py.test against the unit tests. session.run( - 'py.test', '--quiet', - '--cov=google.cloud', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + 'py.test', + '--quiet', + '--cov=google.cloud', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) diff --git a/packages/google-cloud-core/tests/unit/test_client.py b/packages/google-cloud-core/tests/unit/test_client.py index 14eac68abee3..25667712c69a 100644 --- a/packages/google-cloud-core/tests/unit/test_client.py +++ b/packages/google-cloud-core/tests/unit/test_client.py @@ -59,37 +59,31 @@ def test_unpickleable(self): with self.assertRaises(pickle.PicklingError): pickle.dumps(client_obj) - def test_ctor_defaults(self): - from google.cloud._testing import _Monkey - from google.cloud import client - - CREDENTIALS = _make_credentials() - FUNC_CALLS = [] - - def mock_get_credentials(): - FUNC_CALLS.append('get_credentials') - return CREDENTIALS + def test_constructor_defaults(self): + credentials = _make_credentials() - with _Monkey(client, get_credentials=mock_get_credentials): + patch = mock.patch( + 'google.auth.default', return_value=(credentials, None)) + with patch as default: client_obj = self._make_one() - self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) - self.assertEqual(FUNC_CALLS, ['get_credentials']) + default.assert_called_once_with() - def test_ctor_explicit(self): - CREDENTIALS = _make_credentials() - HTTP = object() - client_obj = self._make_one(credentials=CREDENTIALS, _http=HTTP) + def test_constructor_explicit(self): + credentials = _make_credentials() + http = mock.sentinel.http + client_obj = self._make_one(credentials=credentials, _http=http) - self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIs(client_obj._http_internal, HTTP) + self.assertIs(client_obj._credentials, credentials) + self.assertIs(client_obj._http_internal, http) - def test_ctor_bad_credentials(self): - CREDENTIALS = object() + def test_constructor_bad_credentials(self): + credentials = mock.sentinel.credentials with self.assertRaises(ValueError): - 
self._make_one(credentials=CREDENTIALS) + self._make_one(credentials=credentials) def test_from_service_account_json(self): from google.cloud import _helpers @@ -162,34 +156,27 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor_defaults(self): - from google.cloud._testing import _Monkey - from google.cloud import client - - PROJECT = 'PROJECT' - CREDENTIALS = _make_credentials() - FUNC_CALLS = [] - - def mock_determine_proj(project): - FUNC_CALLS.append((project, '_determine_default_project')) - return PROJECT + def test_constructor_defaults(self): + credentials = _make_credentials() + patch1 = mock.patch( + 'google.auth.default', return_value=(credentials, None)) - def mock_get_credentials(): - FUNC_CALLS.append('get_credentials') - return CREDENTIALS + project = 'prahj-ekt' + patch2 = mock.patch( + 'google.cloud.client._determine_default_project', + return_value=project) - with _Monkey(client, get_credentials=mock_get_credentials, - _determine_default_project=mock_determine_proj): - client_obj = self._make_one() + with patch1 as default: + with patch2 as _determine_default_project: + client_obj = self._make_one() - self.assertEqual(client_obj.project, PROJECT) - self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertEqual(client_obj.project, project) + self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) - self.assertEqual( - FUNC_CALLS, - [(None, '_determine_default_project'), 'get_credentials']) + default.assert_called_once_with() + _determine_default_project.assert_called_once_with(None) - def test_ctor_missing_project(self): + def test_constructor_missing_project(self): from google.cloud._testing import _Monkey from google.cloud import client @@ -204,7 +191,7 @@ def mock_determine_proj(project): self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) - def test_ctor_w_invalid_project(self): + def test_constructor_w_invalid_project(self): CREDENTIALS = _make_credentials() HTTP = object() with self.assertRaises(ValueError): @@ -227,11 +214,11 @@ def _explicit_ctor_helper(self, project): self.assertIs(client_obj._credentials, CREDENTIALS) self.assertIs(client_obj._http_internal, HTTP) - def test_ctor_explicit_bytes(self): + def test_constructor_explicit_bytes(self): PROJECT = b'PROJECT' self._explicit_ctor_helper(PROJECT) - def test_ctor_explicit_unicode(self): + def test_constructor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) diff --git a/packages/google-cloud-core/tests/unit/test_credentials.py b/packages/google-cloud-core/tests/unit/test_credentials.py deleted file mode 100644 index 3b313c1dc1d6..000000000000 --- a/packages/google-cloud-core/tests/unit/test_credentials.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2014 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -class Test_get_credentials(unittest.TestCase): - - def _call_fut(self): - from google.cloud import credentials - - return credentials.get_credentials() - - def test_it(self): - with mock.patch('google.auth.default', autospec=True) as default: - default.return_value = ( - mock.sentinel.credentials, mock.sentinel.project) - found = self._call_fut() - - self.assertIs(found, mock.sentinel.credentials) - default.assert_called_once_with() From 4dd72b6840726cbb81cd220caa09e34043095e54 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Jul 2017 11:21:30 -0700 Subject: [PATCH 167/468] Remove httplib2, replace with Requests (#3674) * Core: remove httplib2, replace with Requests Additionally remove make_exception in favor of from_http_status and from_http_response. * Datastore: replace httplib2 with Requests * DNS: replace httplib2 with Requests * Error Reporting: replace httplib2 with requests * Language: replace httplib2 with Requests * Logging: replace httplib2 with requests * Monitoring: replace httplib2 with Requests * Pubsub: replace httplib2 with Requests * Resource Manager: replace httplib2 with Requests * Runtimeconfig: replace httplib2 with Requests * Speech: replace httplib2 with Requests * Storage: replace httplib2 with Requests * BigQuery: replace httplib2 with Requests * Translate: replace httplib2 with Requests * Vision: replace httplib2 with Requests --- .../google/cloud/_helpers.py | 5 +- .../google-cloud-core/google/cloud/_http.py | 54 +-- .../google-cloud-core/google/cloud/client.py | 32 +- .../google/cloud/exceptions.py | 82 +++-- packages/google-cloud-core/setup.py | 3 +- .../tests/unit/test__http.py | 327 +++++++----------- .../tests/unit/test_client.py | 12 +- .../tests/unit/test_exceptions.py | 233 ++++++------- 8 files changed, 307 insertions(+), 441 deletions(-) diff --git a/packages/google-cloud-core/google/cloud/_helpers.py b/packages/google-cloud-core/google/cloud/_helpers.py index fdb22ecdf09c..83f6db3a20fc 100644 --- a/packages/google-cloud-core/google/cloud/_helpers.py +++ b/packages/google-cloud-core/google/cloud/_helpers.py @@ -25,12 +25,11 @@ import re from threading import local as Local -import google_auth_httplib2 -import httplib2 import six from six.moves import http_client import google.auth +import google.auth.transport.requests from google.protobuf import duration_pb2 from google.protobuf import timestamp_pb2 @@ -550,7 +549,7 @@ def make_secure_channel(credentials, user_agent, host, extra_options=()): :returns: gRPC secure channel with credentials attached. """ target = '%s:%d' % (host, http_client.HTTPS_PORT) - http_request = google_auth_httplib2.Request(http=httplib2.Http()) + http_request = google.auth.transport.requests.Request() user_agent_option = ('grpc.primary_user_agent', user_agent) options = (user_agent_option,) + extra_options diff --git a/packages/google-cloud-core/google/cloud/_http.py b/packages/google-cloud-core/google/cloud/_http.py index b7c17ca91d6d..2a0a24e38006 100644 --- a/packages/google-cloud-core/google/cloud/_http.py +++ b/packages/google-cloud-core/google/cloud/_http.py @@ -18,10 +18,9 @@ import platform from pkg_resources import get_distribution -import six from six.moves.urllib.parse import urlencode -from google.cloud.exceptions import make_exception +from google.cloud import exceptions API_BASE_URL = 'https://www.googleapis.com' @@ -67,8 +66,9 @@ def credentials(self): def http(self): """A getter for the HTTP transport used in talking to the API. 
- :rtype: :class:`httplib2.Http` - :returns: A Http object used to transport data. + Returns: + google.auth.transport.requests.AuthorizedSession: + A :class:`requests.Session` instance. """ return self._client._http @@ -168,23 +168,13 @@ def _make_request(self, method, url, data=None, content_type=None, custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :rtype: tuple of ``response`` (a dictionary of sorts) - and ``content`` (a string). - :returns: The HTTP response object and the content of the response, - returned by :meth:`_do_request`. + :rtype: :class:`requests.Response` + :returns: The HTTP response. """ headers = headers or {} headers.update(self._EXTRA_HEADERS) headers['Accept-Encoding'] = 'gzip' - if data: - content_length = len(str(data)) - else: - content_length = 0 - - # NOTE: str is intended, bytes are sufficient for headers. - headers['Content-Length'] = str(content_length) - if content_type: headers['Content-Type'] = content_type @@ -215,12 +205,11 @@ def _do_request(self, method, url, headers, data, (Optional) Unused ``target_object`` here but may be used by a superclass. - :rtype: tuple of ``response`` (a dictionary of sorts) - and ``content`` (a string). - :returns: The HTTP response object and the content of the response. + :rtype: :class:`requests.Response` + :returns: The HTTP response. """ - return self.http.request(uri=url, method=method, headers=headers, - body=data) + return self.http.request( + url=url, method=method, headers=headers, data=data) def api_request(self, method, path, query_params=None, data=None, content_type=None, headers=None, @@ -281,7 +270,7 @@ def api_request(self, method, path, query_params=None, :raises ~google.cloud.exceptions.GoogleCloudError: if the response code is not 200 OK. - :raises TypeError: if the response content type is not JSON. + :raises ValueError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. 
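
To make the ``api_request()`` contract described above concrete, here is a rough usage sketch against a hypothetical ``JSONConnection`` subclass; the service URL, version, and paths are invented for illustration, and actually running it requires Application Default Credentials to be configured:

    from google.cloud import _http
    from google.cloud import client as client_module


    class _WidgetsConnection(_http.JSONConnection):
        # All three values are placeholders, invented for this sketch.
        API_BASE_URL = 'https://widgets.example.com'
        API_VERSION = 'v1'
        API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'


    conn = _WidgetsConnection(client_module.Client())

    # JSON endpoints come back already parsed into a dict ...
    info = conn.api_request('GET', '/widgets')

    # ... while expect_json=False hands back the raw response body (bytes).
    blob = conn.api_request('GET', '/widgets/raw', expect_json=False)
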
@@ -296,21 +285,14 @@ def api_request(self, method, path, query_params=None, data = json.dumps(data) content_type = 'application/json' - response, content = self._make_request( + response = self._make_request( method=method, url=url, data=data, content_type=content_type, headers=headers, target_object=_target_object) - if not 200 <= response.status < 300: - raise make_exception(response, content, - error_info=method + ' ' + url) + if not 200 <= response.status_code < 300: + raise exceptions.from_http_response(response) - string_or_bytes = (six.binary_type, six.text_type) - if content and expect_json and isinstance(content, string_or_bytes): - content_type = response.get('content-type', '') - if not content_type.startswith('application/json'): - raise TypeError('Expected JSON, got %s' % content_type) - if isinstance(content, six.binary_type): - content = content.decode('utf-8') - return json.loads(content) - - return content + if expect_json and response.content: + return response.json() + else: + return response.content diff --git a/packages/google-cloud-core/google/cloud/client.py b/packages/google-cloud-core/google/cloud/client.py index 468cf9e40a52..7403be71f521 100644 --- a/packages/google-cloud-core/google/cloud/client.py +++ b/packages/google-cloud-core/google/cloud/client.py @@ -18,11 +18,11 @@ import json from pickle import PicklingError -import google_auth_httplib2 import six import google.auth import google.auth.credentials +import google.auth.transport.requests from google.cloud._helpers import _determine_default_project from google.oauth2 import service_account @@ -87,36 +87,23 @@ class Client(_ClientFactoryMixin): Stores ``credentials`` and an HTTP object so that subclasses can pass them along to a connection class. - If no value is passed in for ``_http``, a :class:`httplib2.Http` object + If no value is passed in for ``_http``, a :class:`requests.Session` object will be created and authorized with the ``credentials``. If not, the ``credentials`` and ``_http`` need not be related. Callers and subclasses may seek to use the private key from ``credentials`` to sign data. - A custom (non-``httplib2``) HTTP object must have a ``request`` method - which accepts the following arguments: - - * ``uri`` - * ``method`` - * ``body`` - * ``headers`` - - In addition, ``redirections`` and ``connection_type`` may be used. - - A custom ``_http`` object will also need to be able to add a bearer token - to API requests and handle token refresh on 401 errors. - :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this client. If not passed (and if no ``_http`` object is passed), falls back to the default inferred from the environment. - :type _http: :class:`~httplib2.Http` + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an + :meth:`requests.Session.request`. If not passed, an ``_http`` object is created that is bound to the ``credentials`` for the current object. This parameter should be considered private, and could @@ -151,12 +138,13 @@ def __getstate__(self): def _http(self): """Getter for object used for HTTP transport. - :rtype: :class:`~httplib2.Http` + :rtype: :class:`~requests.Session` :returns: An HTTP object. 
""" if self._http_internal is None: - self._http_internal = google_auth_httplib2.AuthorizedHttp( - self._credentials) + self._http_internal = ( + google.auth.transport.requests.AuthorizedSession( + self._credentials)) return self._http_internal @@ -204,10 +192,10 @@ class ClientWithProject(Client, _ClientProjectMixin): passed), falls back to the default inferred from the environment. - :type _http: :class:`~httplib2.Http` + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an + :meth:`~requests.Session.request`. If not passed, an ``_http`` object is created that is bound to the ``credentials`` for the current object. This parameter should be considered private, and could diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index e911980c6328..2e7eca3be98d 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -21,7 +21,6 @@ from __future__ import absolute_import import copy -import json import six @@ -186,56 +185,55 @@ class GatewayTimeout(ServerError): code = 504 -def make_exception(response, content, error_info=None, use_json=True): - """Factory: create exception based on HTTP response code. +def from_http_status(status_code, message, errors=()): + """Create a :class:`GoogleCloudError` from an HTTP status code. - :type response: :class:`httplib2.Response` or other HTTP response object - :param response: A response object that defines a status code as the - status attribute. + Args: + status_code (int): The HTTP status code. + message (str): The exception message. + errors (Sequence[Any]): A list of additional error information. + + Returns: + GoogleCloudError: An instance of the appropriate subclass of + :class:`GoogleCloudError`. + """ + error_class = _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleCloudError) + error = error_class(message, errors) + + if error.code is None: + error.code = status_code + + return error - :type content: str or dictionary - :param content: The body of the HTTP error response. - :type error_info: str - :param error_info: Optional string giving extra information about the - failed request. +def from_http_response(response): + """Create a :class:`GoogleCloudError` from a :class:`requests.Response`. - :type use_json: bool - :param use_json: Flag indicating if ``content`` is expected to be JSON. + Args: + response (requests.Response): The HTTP response. - :rtype: instance of :class:`GoogleCloudError`, or a concrete subclass. - :returns: Exception specific to the error response. + Returns: + GoogleCloudError: An instance of the appropriate subclass of + :class:`GoogleCloudError`, with the message and errors populated + from the response. """ - if isinstance(content, six.binary_type): - content = content.decode('utf-8') - - if isinstance(content, six.string_types): - payload = None - if use_json: - try: - payload = json.loads(content) - except ValueError: - # Expected JSON but received something else. 
- pass - if payload is None: - payload = {'error': {'message': content}} - else: - payload = content - - message = payload.get('error', {}).get('message', '') + try: + payload = response.json() + except ValueError: + payload = {'error': {'message': response.text or 'unknown error'}} + + error_message = payload.get('error', {}).get('message', 'unknown error') errors = payload.get('error', {}).get('errors', ()) - if error_info is not None: - message += ' (%s)' % (error_info,) + message = '{method} {url}: {error}'.format( + method=response.request.method, + url=response.request.url, + error=error_message) - try: - klass = _HTTP_CODE_TO_EXCEPTION[response.status] - except KeyError: - error = GoogleCloudError(message, errors) - error.code = response.status - else: - error = klass(message, errors) - return error + exception = from_http_status( + response.status_code, message, errors=errors) + exception.response = response + return exception def _walk_subclasses(klass): diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index ba84f2347d18..2a221ffe04b9 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -51,11 +51,10 @@ REQUIREMENTS = [ - 'httplib2 >= 0.9.1', 'googleapis-common-protos >= 1.3.4', 'protobuf >= 3.0.0', 'google-auth >= 0.4.0, < 2.0.0dev', - 'google-auth-httplib2', + 'requests >= 2.4.0, < 3.0.0dev', 'six', 'tenacity >= 4.0.0, <5.0.0dev' ] diff --git a/packages/google-cloud-core/tests/unit/test__http.py b/packages/google-cloud-core/tests/unit/test__http.py index 22df11566811..abf630b9a41f 100644 --- a/packages/google-cloud-core/tests/unit/test__http.py +++ b/packages/google-cloud-core/tests/unit/test__http.py @@ -12,9 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
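
Before the reworked tests below, a small sketch of how the new ``from_http_response()`` added above maps a failed Requests response onto the existing exception classes; the URL and JSON payload are made up, and ``_content`` is assigned directly in the same way this patch's own test helpers do:

    import requests

    from google.cloud import exceptions

    response = requests.Response()
    response.status_code = 404
    response._content = b'{"error": {"message": "Not Found"}}'
    response.request = requests.Request(
        method='GET', url='https://example.com/bucket').prepare()

    error = exceptions.from_http_response(response)

    assert isinstance(error, exceptions.NotFound)
    assert error.code == 404
    # The message is prefixed with the request, e.g.
    # 'GET https://example.com/bucket: Not Found', and the original
    # response stays attached on ``error.response``.
    assert error.response is response
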
+import json import unittest import mock +import requests +from six.moves import http_client class TestConnection(unittest.TestCase): @@ -52,7 +55,24 @@ def test_user_agent_format(self): self.assertEqual(conn.USER_AGENT, expected_ua) +def make_response(status=http_client.OK, content=b'', headers={}): + response = requests.Response() + response.status_code = status + response._content = content + response.headers = headers + response.request = requests.Request() + return response + + +def make_requests_session(responses): + session = mock.create_autospec(requests.Session, instance=True) + session.request.side_effect = responses + return session + + class TestJSONConnection(unittest.TestCase): + JSON_HEADERS = {'content-type': 'application/json'} + EMPTY_JSON_RESPONSE = make_response(content=b'{}', headers=JSON_HEADERS) @staticmethod def _get_target_class(): @@ -119,129 +139,123 @@ def test_build_api_url_w_extra_query_params(self): self.assertEqual(parms['qux'], ['quux', 'corge']) def test__make_request_no_data_no_content_type_no_headers(self): - http = _Http( - {'status': '200', 'content-type': 'text/plain'}, - b'', - ) + http = make_requests_session([make_response()]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) - URI = 'http://example.com/test' - headers, content = conn._make_request('GET', URI) - self.assertEqual(headers['status'], '200') - self.assertEqual(headers['content-type'], 'text/plain') - self.assertEqual(content, b'') - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - self.assertIsNone(http._called_with['body']) + url = 'http://example.com/test' + + response = conn._make_request('GET', url) + + self.assertEqual(response.status_code, http_client.OK) + self.assertEqual(response.content, b'') + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': '0', 'User-Agent': conn.USER_AGENT, } - self.assertEqual(http._called_with['headers'], expected_headers) + http.request.assert_called_once_with( + method='GET', url=url, headers=expected_headers, data=None) def test__make_request_w_data_no_extra_headers(self): - http = _Http( - {'status': '200', 'content-type': 'text/plain'}, - b'', - ) + http = make_requests_session([make_response()]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) - URI = 'http://example.com/test' - conn._make_request('GET', URI, {}, 'application/json') - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - self.assertEqual(http._called_with['body'], {}) + url = 'http://example.com/test' + data = b'data' + + conn._make_request('GET', url, data, 'application/json') + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': '0', 'Content-Type': 'application/json', 'User-Agent': conn.USER_AGENT, } - self.assertEqual(http._called_with['headers'], expected_headers) + http.request.assert_called_once_with( + method='GET', url=url, headers=expected_headers, data=data) def test__make_request_w_extra_headers(self): - http = _Http( - {'status': '200', 'content-type': 'text/plain'}, - b'', - ) + http = make_requests_session([make_response()]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) - URI = 'http://example.com/test' - conn._make_request('GET', URI, headers={'X-Foo': 'foo'}) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - self.assertIsNone(http._called_with['body']) + + url = 
'http://example.com/test' + conn._make_request('GET', url, headers={'X-Foo': 'foo'}) + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': '0', 'X-Foo': 'foo', 'User-Agent': conn.USER_AGENT, } - self.assertEqual(http._called_with['headers'], expected_headers) + http.request.assert_called_once_with( + method='GET', url=url, headers=expected_headers, data=None) def test_api_request_defaults(self): - http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) + http = make_requests_session([ + make_response(content=b'{}', headers=self.JSON_HEADERS)]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - PATH = '/path/required' - # Intended to emulate self.mock_template - URI = '/'.join([ - conn.API_BASE_URL, - 'mock', - '%s%s' % (conn.API_VERSION, PATH), - ]) - self.assertEqual(conn.api_request('GET', PATH), {}) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - self.assertIsNone(http._called_with['body']) + path = '/path/required' + + self.assertEqual(conn.api_request('GET', path), {}) + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': '0', 'User-Agent': conn.USER_AGENT, } - self.assertEqual(http._called_with['headers'], expected_headers) + expected_url = '{base}/mock/{version}{path}'.format( + base=conn.API_BASE_URL, + version=conn.API_VERSION, + path=path) + http.request.assert_called_once_with( + method='GET', + url=expected_url, + headers=expected_headers, + data=None) def test_api_request_w_non_json_response(self): - http = _Http( - {'status': '200', 'content-type': 'text/plain'}, - b'CONTENT', - ) + http = make_requests_session([ + make_response(content=b'content')]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertRaises(TypeError, conn.api_request, 'GET', '/') + with self.assertRaises(ValueError): + conn.api_request('GET', '/') def test_api_request_wo_json_expected(self): - http = _Http( - {'status': '200', 'content-type': 'text/plain'}, - b'CONTENT', - ) + http = make_requests_session([ + make_response(content=b'content')]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertEqual(conn.api_request('GET', '/', expect_json=False), - b'CONTENT') + + result = conn.api_request('GET', '/', expect_json=False) + + self.assertEqual(result, b'content') def test_api_request_w_query_params(self): from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit - http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) + http = make_requests_session([self.EMPTY_JSON_RESPONSE]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertEqual(conn.api_request('GET', '/', { + + result = conn.api_request('GET', '/', { 'foo': 'bar', 'baz': ['qux', 'quux'] - }), {}) - self.assertEqual(http._called_with['method'], 'GET') - uri = http._called_with['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) + }) + + self.assertEqual(result, {}) + + expected_headers = { + 'Accept-Encoding': 'gzip', + 'User-Agent': conn.USER_AGENT, + } + http.request.assert_called_once_with( + method='GET', url=mock.ANY, headers=expected_headers, + data=None) + + url = http.request.call_args[1]['url'] + scheme, netloc, path, qs, _ = urlsplit(url) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) # Intended to emulate self.mock_template PATH = '/'.join([ @@ -254,175 +268,84 @@ def 
test_api_request_w_query_params(self): parms = dict(parse_qs(qs)) self.assertEqual(parms['foo'], ['bar']) self.assertEqual(parms['baz'], ['qux', 'quux']) - self.assertIsNone(http._called_with['body']) - expected_headers = { - 'Accept-Encoding': 'gzip', - 'Content-Length': '0', - 'User-Agent': conn.USER_AGENT, - } - self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_w_headers(self): - from six.moves.urllib.parse import urlsplit - - http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) + http = make_requests_session([self.EMPTY_JSON_RESPONSE]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertEqual( - conn.api_request('GET', '/', headers={'X-Foo': 'bar'}), {}) - self.assertEqual(http._called_with['method'], 'GET') - uri = http._called_with['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - # Intended to emulate self.mock_template - PATH = '/'.join([ - '', - 'mock', - conn.API_VERSION, - '', - ]) - self.assertEqual(path, PATH) - self.assertEqual(qs, '') - self.assertIsNone(http._called_with['body']) + + result = conn.api_request('GET', '/', headers={'X-Foo': 'bar'}) + self.assertEqual(result, {}) + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': '0', 'User-Agent': conn.USER_AGENT, 'X-Foo': 'bar', } - self.assertEqual(http._called_with['headers'], expected_headers) + http.request.assert_called_once_with( + method='GET', url=mock.ANY, headers=expected_headers, + data=None) def test_api_request_w_extra_headers(self): - from six.moves.urllib.parse import urlsplit - - http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) + http = make_requests_session([self.EMPTY_JSON_RESPONSE]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) conn._EXTRA_HEADERS = { 'X-Baz': 'dax-quux', 'X-Foo': 'not-bar', # Collision with ``headers``. } - self.assertEqual( - conn.api_request('GET', '/', headers={'X-Foo': 'bar'}), {}) - self.assertEqual(http._called_with['method'], 'GET') - uri = http._called_with['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - # Intended to emulate self.mock_template - PATH = '/'.join([ - '', - 'mock', - conn.API_VERSION, - '', - ]) - self.assertEqual(path, PATH) - self.assertEqual(qs, '') - self.assertIsNone(http._called_with['body']) + + result = conn.api_request('GET', '/', headers={'X-Foo': 'bar'}) + + self.assertEqual(result, {}) + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': '0', 'User-Agent': conn.USER_AGENT, 'X-Foo': 'not-bar', # The one passed-in is overridden. 
'X-Baz': 'dax-quux', } - self.assertEqual(http._called_with['headers'], expected_headers) + http.request.assert_called_once_with( + method='GET', url=mock.ANY, headers=expected_headers, + data=None) def test_api_request_w_data(self): - import json - - DATA = {'foo': 'bar'} - DATAJ = json.dumps(DATA) - http = _Http( - {'status': '200', 'content-type': 'application/json'}, - b'{}', - ) + http = make_requests_session([self.EMPTY_JSON_RESPONSE]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - # Intended to emulate self.mock_template - URI = '/'.join([ - conn.API_BASE_URL, - 'mock', - conn.API_VERSION, - '', - ]) - self.assertEqual(conn.api_request('POST', '/', data=DATA), {}) - self.assertEqual(http._called_with['method'], 'POST') - self.assertEqual(http._called_with['uri'], URI) - self.assertEqual(http._called_with['body'], DATAJ) + + data = {'foo': 'bar'} + self.assertEqual(conn.api_request('POST', '/', data=data), {}) + + expected_data = json.dumps(data) + expected_headers = { 'Accept-Encoding': 'gzip', - 'Content-Length': str(len(DATAJ)), 'Content-Type': 'application/json', 'User-Agent': conn.USER_AGENT, } - self.assertEqual(http._called_with['headers'], expected_headers) + + http.request.assert_called_once_with( + method='POST', url=mock.ANY, headers=expected_headers, + data=expected_data) def test_api_request_w_404(self): - from google.cloud.exceptions import NotFound + from google.cloud import exceptions - http = _Http( - {'status': '404', 'content-type': 'text/plain'}, - b'{}' - ) + http = make_requests_session([make_response(http_client.NOT_FOUND)]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertRaises(NotFound, conn.api_request, 'GET', '/') - def test_api_request_w_500(self): - from google.cloud.exceptions import InternalServerError + with self.assertRaises(exceptions.NotFound): + conn.api_request('GET', '/') - http = _Http( - {'status': '500', 'content-type': 'text/plain'}, - b'{}', - ) - client = mock.Mock(_http=http, spec=['_http']) - conn = self._make_mock_one(client) - self.assertRaises(InternalServerError, conn.api_request, 'GET', '/') + def test_api_request_w_500(self): + from google.cloud import exceptions - def test_api_request_non_binary_response(self): - http = _Http( - {'status': '200', 'content-type': 'application/json'}, - u'{}', - ) + http = make_requests_session([ + make_response(http_client.INTERNAL_SERVER_ERROR)]) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - result = conn.api_request('GET', '/') - # Intended to emulate self.mock_template - URI = '/'.join([ - conn.API_BASE_URL, - 'mock', - conn.API_VERSION, - '', - ]) - self.assertEqual(result, {}) - self.assertEqual(http._called_with['method'], 'GET') - self.assertEqual(http._called_with['uri'], URI) - self.assertIsNone(http._called_with['body']) - expected_headers = { - 'Accept-Encoding': 'gzip', - 'Content-Length': '0', - 'User-Agent': conn.USER_AGENT, - } - self.assertEqual(http._called_with['headers'], expected_headers) - - -class _Http(object): - - _called_with = None - - def __init__(self, headers, content): - from httplib2 import Response - - self._response = Response(headers) - self._content = content - - def request(self, **kw): - self._called_with = kw - return self._response, self._content + with self.assertRaises(exceptions.InternalServerError): + conn.api_request('GET', '/') diff --git a/packages/google-cloud-core/tests/unit/test_client.py 
b/packages/google-cloud-core/tests/unit/test_client.py index 25667712c69a..bed3ebe2c036 100644 --- a/packages/google-cloud-core/tests/unit/test_client.py +++ b/packages/google-cloud-core/tests/unit/test_client.py @@ -132,17 +132,17 @@ def test__http_property_new(self): client = self._make_one(credentials=credentials) self.assertIsNone(client._http_internal) - patch = mock.patch('google_auth_httplib2.AuthorizedHttp', - return_value=mock.sentinel.http) - with patch as mocked: + authorized_session_patch = mock.patch( + 'google.auth.transport.requests.AuthorizedSession', + return_value=mock.sentinel.http) + with authorized_session_patch as AuthorizedSession: self.assertIs(client._http, mock.sentinel.http) # Check the mock. - mocked.assert_called_once_with(credentials) - self.assertEqual(mocked.call_count, 1) + AuthorizedSession.assert_called_once_with(credentials) # Make sure the cached value is used on subsequent access. self.assertIs(client._http_internal, mock.sentinel.http) self.assertIs(client._http, mock.sentinel.http) - self.assertEqual(mocked.call_count, 1) + self.assertEqual(AuthorizedSession.call_count, 1) class TestClientWithProject(unittest.TestCase): diff --git a/packages/google-cloud-core/tests/unit/test_exceptions.py b/packages/google-cloud-core/tests/unit/test_exceptions.py index b3488296eff4..4be260831825 100644 --- a/packages/google-cloud-core/tests/unit/test_exceptions.py +++ b/packages/google-cloud-core/tests/unit/test_exceptions.py @@ -12,139 +12,116 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest +import json +import requests +from six.moves import http_client -class Test_GoogleCloudError(unittest.TestCase): +from google.cloud import exceptions - @staticmethod - def _get_target_class(): - from google.cloud.exceptions import GoogleCloudError - return GoogleCloudError +def test_create_google_cloud_error(): + exception = exceptions.GoogleCloudError('Testing') + exception.code = 600 + assert str(exception) == '600 Testing' + assert exception.message == 'Testing' + assert exception.errors == [] - def _make_one(self, message, errors=()): - return self._get_target_class()(message, errors=errors) - def test_ctor_defaults(self): - e = self._make_one('Testing') - e.code = 600 - self.assertEqual(str(e), '600 Testing') - self.assertEqual(e.message, 'Testing') - self.assertEqual(list(e.errors), []) +def test_create_google_cloud_error_with_args(): + error = { + 'domain': 'global', + 'location': 'test', + 'locationType': 'testing', + 'message': 'Testing', + 'reason': 'test', + } + exception = exceptions.GoogleCloudError('Testing', [error]) + exception.code = 600 + assert str(exception) == '600 Testing' + assert exception.message == 'Testing' + assert exception.errors == [error] - def test_ctor_explicit(self): - ERROR = { - 'domain': 'global', - 'location': 'test', - 'locationType': 'testing', - 'message': 'Testing', - 'reason': 'test', - } - e = self._make_one('Testing', [ERROR]) - e.code = 600 - self.assertEqual(str(e), '600 Testing') - self.assertEqual(e.message, 'Testing') - self.assertEqual(list(e.errors), [ERROR]) - - -class Test_make_exception(unittest.TestCase): - - def _call_fut(self, response, content, error_info=None, use_json=True): - from google.cloud.exceptions import make_exception - - return make_exception(response, content, error_info=error_info, - use_json=use_json) - - def test_hit_w_content_as_str(self): - from google.cloud.exceptions import NotFound - - response = _Response(404) - content = 
b'{"error": {"message": "Not Found"}}' - exception = self._call_fut(response, content) - self.assertIsInstance(exception, NotFound) - self.assertEqual(exception.message, 'Not Found') - self.assertEqual(list(exception.errors), []) - - def test_hit_w_content_as_unicode(self): - import six - from google.cloud._helpers import _to_bytes - from google.cloud.exceptions import NotFound - - error_message = u'That\u2019s not found.' - expected = u'404 %s' % (error_message,) - - response = _Response(404) - content = u'{"error": {"message": "%s" }}' % (error_message,) - - exception = self._call_fut(response, content) - if six.PY2: - self.assertEqual(str(exception), - _to_bytes(expected, encoding='utf-8')) - else: # pragma: NO COVER - self.assertEqual(str(exception), expected) - - self.assertIsInstance(exception, NotFound) - self.assertEqual(exception.message, error_message) - self.assertEqual(list(exception.errors), []) - - def test_hit_w_content_as_unicode_as_py3(self): - import six - from google.cloud._testing import _Monkey - from google.cloud.exceptions import NotFound - - error_message = u'That is not found.' - expected = u'404 %s' % (error_message,) - - with _Monkey(six, PY2=False): - response = _Response(404) - content = u'{"error": {"message": "%s" }}' % (error_message,) - exception = self._call_fut(response, content) - - self.assertIsInstance(exception, NotFound) - self.assertEqual(exception.message, error_message) - self.assertEqual(list(exception.errors), []) - self.assertEqual(str(exception), expected) - - def test_miss_w_content_as_dict(self): - from google.cloud.exceptions import GoogleCloudError - - ERROR = { - 'domain': 'global', - 'location': 'test', - 'locationType': 'testing', - 'message': 'Testing', - 'reason': 'test', + +def test_from_http_status(): + message = 'message' + exception = exceptions.from_http_status(http_client.NOT_FOUND, message) + assert exception.code == http_client.NOT_FOUND + assert exception.message == message + assert exception.errors == [] + + +def test_from_http_status_with_errors(): + message = 'message' + errors = ['1', '2'] + exception = exceptions.from_http_status( + http_client.NOT_FOUND, message, errors=errors) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == message + assert exception.errors == errors + + +def test_from_http_status_unknown_code(): + message = 'message' + status_code = 156 + exception = exceptions.from_http_status(status_code, message) + assert exception.code == status_code + assert exception.message == message + + +def make_response(content): + response = requests.Response() + response._content = content + response.status_code = http_client.NOT_FOUND + response.request = requests.Request( + method='POST', url='https://example.com').prepare() + return response + + +def test_from_http_response_no_content(): + response = make_response(None) + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: unknown error' + assert exception.response == response + + +def test_from_http_response_text_content(): + response = make_response(b'message') + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: message' + + +def 
test_from_http_response_json_content(): + response = make_response(json.dumps({ + 'error': { + 'message': 'json message', + 'errors': ['1', '2'] } - response = _Response(600) - content = {"error": {"message": "Unknown Error", "errors": [ERROR]}} - exception = self._call_fut(response, content) - self.assertIsInstance(exception, GoogleCloudError) - self.assertEqual(exception.message, 'Unknown Error') - self.assertEqual(list(exception.errors), [ERROR]) - - def test_html_when_json_expected(self): - from google.cloud.exceptions import NotFound - - response = _Response(NotFound.code) - content = '404 Not Found' - exception = self._call_fut(response, content, use_json=True) - self.assertIsInstance(exception, NotFound) - self.assertEqual(exception.message, content) - self.assertEqual(list(exception.errors), []) - - def test_without_use_json(self): - from google.cloud.exceptions import TooManyRequests - - content = u'error-content' - response = _Response(TooManyRequests.code) - exception = self._call_fut(response, content, use_json=False) - - self.assertIsInstance(exception, TooManyRequests) - self.assertEqual(exception.message, content) - self.assertEqual(list(exception.errors), []) - - -class _Response(object): - def __init__(self, status): - self.status = status + }).encode('utf-8')) + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: json message' + assert exception.errors == ['1', '2'] + + +def test_from_http_response_bad_json_content(): + response = make_response(json.dumps({'meep': 'moop'}).encode('utf-8')) + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: unknown error' From 56a15392b875ecd1a3ef16c87de67a2a6d725ff4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 4 Aug 2017 09:50:46 -0700 Subject: [PATCH 168/468] Add a function to complain about obselete packages. (#3724) --- .../google/cloud/obselete.py | 40 +++++++++++++++++++ .../tests/unit/test_obselete.py | 31 ++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 packages/google-cloud-core/google/cloud/obselete.py create mode 100644 packages/google-cloud-core/tests/unit/test_obselete.py diff --git a/packages/google-cloud-core/google/cloud/obselete.py b/packages/google-cloud-core/google/cloud/obselete.py new file mode 100644 index 000000000000..9af28cd85d52 --- /dev/null +++ b/packages/google-cloud-core/google/cloud/obselete.py @@ -0,0 +1,40 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import warnings + +import pkg_resources + + +def complain(distribution_name): + """Issue a warning if `distribution_name` is installed. + + In a future release, this method will be updated to raise ImportError + rather than just send a warning. 
+ + Args: + distribution_name (str): The name of the obselete distribution. + """ + try: + pkg_resources.get_distribution(distribution_name) + warnings.warn( + 'The {pkg} distribution is now obselete. ' + 'Please `pip uninstall {pkg}`. ' + 'In the future, this warning will become an ImportError.'.format( + pkg=distribution_name, + ), + DeprecationWarning, + ) + except pkg_resources.DistributionNotFound: + pass diff --git a/packages/google-cloud-core/tests/unit/test_obselete.py b/packages/google-cloud-core/tests/unit/test_obselete.py new file mode 100644 index 000000000000..78764c749490 --- /dev/null +++ b/packages/google-cloud-core/tests/unit/test_obselete.py @@ -0,0 +1,31 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import warnings + +import mock + +from google.cloud import obselete + + +def test_complain_noop(): + with mock.patch.object(warnings, 'warn', autospec=True) as warn: + obselete.complain('bogus_package') + assert warn.call_count == 0 + + +def test_complain(): + with mock.patch.object(warnings, 'warn', autospec=True) as warn: + obselete.complain('google-cloud-core') + warn.assert_called_once_with(mock.ANY, DeprecationWarning) From e472b9af9131c66c8795b29ad29a6dc9d5d0edea Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Aug 2017 14:56:10 -0700 Subject: [PATCH 169/468] Cutting release google-cloud-core==0.26.0. (#3727) --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 2a221ffe04b9..52ea0c253d8d 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -61,7 +61,7 @@ setup( name='google-cloud-core', - version='0.25.0', + version='0.26.0', description='API Client library for Google Cloud: Core Helpers', long_description=README, namespace_packages=[ From 959ce9829da672c41b48c7348ba9abae4a58d709 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Aug 2017 15:26:26 -0700 Subject: [PATCH 170/468] Add missing "packages" to sentence in ``core`` README. (#3728) --- packages/google-cloud-core/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index 53cbd311a50e..0685a028dbb1 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -2,8 +2,8 @@ Core Helpers for Google Cloud Python Client Library =================================================== This library is not meant to stand-alone. Instead it defines -common helpers (e.g. base ``Client`` and ``Connection`` classes) -used by all of the ``google-cloud-*``. +common helpers (e.g. base ``Client`` classes) used by all of the +``google-cloud-*`` packages. 
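
Stepping back from the README wording for a moment: the ``obselete.complain()`` helper introduced a couple of patches above is intended to be called at import time by whichever package supersedes an old distribution. A minimal usage sketch (the distribution name here is hypothetical):

    from google.cloud import obselete

    # Warns (for now) if the superseded distribution is still installed;
    # per the docstring above, this will eventually become an ImportError.
    obselete.complain('google-cloud-some-renamed-package')
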
|pypi| |versions| From 8995877a2762490a9b8516848e1545af935b7f2c Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 7 Aug 2017 09:05:32 -0700 Subject: [PATCH 171/468] Add google.api.core package (#3726) --- .../google-cloud-core/google/api/__init__.py | 22 +++++++++++++++++++ .../google/api/core/__init__.py | 18 +++++++++++++++ packages/google-cloud-core/setup.py | 1 + 3 files changed, 41 insertions(+) create mode 100644 packages/google-cloud-core/google/api/__init__.py create mode 100644 packages/google-cloud-core/google/api/core/__init__.py diff --git a/packages/google-cloud-core/google/api/__init__.py b/packages/google-cloud-core/google/api/__init__.py new file mode 100644 index 000000000000..2648890d2599 --- /dev/null +++ b/packages/google-cloud-core/google/api/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google API namespace package.""" + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-core/google/api/core/__init__.py b/packages/google-cloud-core/google/api/core/__init__.py new file mode 100644 index 000000000000..123d3e291c70 --- /dev/null +++ b/packages/google-cloud-core/google/api/core/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google API Core. + +This package contains common code and utilties used by Google client libraries. 
+""" diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 52ea0c253d8d..5cc4a9c8141b 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -67,6 +67,7 @@ namespace_packages=[ 'google', 'google.cloud', + 'google.api', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, From 4d6f435d04ad89b5453a3411787c0a81f4e879a7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 7 Aug 2017 14:41:06 -0700 Subject: [PATCH 172/468] Explicitly depend on setuptools >= 34 (#3745) --- packages/google-cloud-core/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 5cc4a9c8141b..f697e385494b 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -55,6 +55,7 @@ 'protobuf >= 3.0.0', 'google-auth >= 0.4.0, < 2.0.0dev', 'requests >= 2.4.0, < 3.0.0dev', + 'setuptools >= 34.0.0', 'six', 'tenacity >= 4.0.0, <5.0.0dev' ] From ac3153cbeff431bac83fafc2c02067b2a7e17912 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 7 Aug 2017 14:49:41 -0700 Subject: [PATCH 173/468] Bump requests minimum bound to 2.18.0 (#3748) --- packages/google-cloud-core/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index f697e385494b..96d7567b9de6 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -54,7 +54,7 @@ 'googleapis-common-protos >= 1.3.4', 'protobuf >= 3.0.0', 'google-auth >= 0.4.0, < 2.0.0dev', - 'requests >= 2.4.0, < 3.0.0dev', + 'requests >= 2.18.0, < 3.0.0dev', 'setuptools >= 34.0.0', 'six', 'tenacity >= 4.0.0, <5.0.0dev' From e211a2101db5cc25cc74be3a14ee38f2ef73562e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 8 Aug 2017 10:32:00 -0700 Subject: [PATCH 174/468] Add google.api.core.exceptions (#3738) * Add google.api.core.exceptions * Add google.api.core to coverage report * Alias google.cloud.exceptions to google.api.core.exceptions * Fix lint * Address review comments * Fix typo --- .../google/api/core/exceptions.py | 420 ++++++++++++++++++ .../google/cloud/exceptions.py | 254 ++--------- .../google/cloud/obselete.py | 2 + packages/google-cloud-core/nox.py | 1 + .../tests/unit/api_core/__init__.py | 0 .../tests/unit/api_core/test_exceptions.py | 201 +++++++++ 6 files changed, 655 insertions(+), 223 deletions(-) create mode 100644 packages/google-cloud-core/google/api/core/exceptions.py create mode 100644 packages/google-cloud-core/tests/unit/api_core/__init__.py create mode 100644 packages/google-cloud-core/tests/unit/api_core/test_exceptions.py diff --git a/packages/google-cloud-core/google/api/core/exceptions.py b/packages/google-cloud-core/google/api/core/exceptions.py new file mode 100644 index 000000000000..c25816abce34 --- /dev/null +++ b/packages/google-cloud-core/google/api/core/exceptions.py @@ -0,0 +1,420 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Exceptions raised by Google API core & clients. + +This module provides base classes for all errors raised by libraries based +on :mod:`google.api.core`, including both HTTP and gRPC clients. +""" + +from __future__ import absolute_import +from __future__ import unicode_literals + +import six +from six.moves import http_client + +try: + import grpc +except ImportError: # pragma: NO COVER + grpc = None + +# Lookup tables for mapping exceptions from HTTP and gRPC transports. +# Populated by _APICallErrorMeta +_HTTP_CODE_TO_EXCEPTION = {} +_GRPC_CODE_TO_EXCEPTION = {} + + +class GoogleAPIError(Exception): + """Base class for all exceptions raised by Google API Clients.""" + pass + + +class _GoogleAPICallErrorMeta(type): + """Metaclass for registering GoogleAPICallError subclasses.""" + def __new__(mcs, name, bases, class_dict): + cls = type.__new__(mcs, name, bases, class_dict) + if cls.code is not None: + _HTTP_CODE_TO_EXCEPTION.setdefault(cls.code, cls) + if cls.grpc_status_code is not None: + _GRPC_CODE_TO_EXCEPTION.setdefault(cls.grpc_status_code, cls) + return cls + + +@six.python_2_unicode_compatible +@six.add_metaclass(_GoogleAPICallErrorMeta) +class GoogleAPICallError(GoogleAPIError): + """Base class for exceptions raised by calling API methods. + + Args: + message (str): The exception message. + errors (Sequence[Any]): An optional list of error details. + response (Union[requests.Request, grpc.Call]): The response or + gRPC call metadata. + """ + + code = None + """Optional[int]: The HTTP status code associated with this error. + + This may be ``None`` if the exception does not have a direct mapping + to an HTTP error. + + See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html + """ + + grpc_status_code = None + """Optional[grpc.StatusCode]: The gRPC status code associated with this + error. + + This may be ``None`` if the exception does not match up to a gRPC error. + """ + + def __init__(self, message, errors=(), response=None): + super(GoogleAPICallError, self).__init__(message) + self.message = message + """str: The exception message.""" + self._errors = errors + self._response = response + + def __str__(self): + return '{} {}'.format(self.code, self.message) + + @property + def errors(self): + """Detailed error information. + + Returns: + Sequence[Any]: A list of additional error details. + """ + return list(self._errors) + + @property + def response(self): + """Optional[Union[requests.Request, grpc.Call]]: The response or + gRPC call metadata.""" + return self._response + + +class Redirection(GoogleAPICallError): + """Base class for for all redirection (HTTP 3xx) responses.""" + + +class MovedPermanently(Redirection): + """Exception mapping a ``301 Moved Permanently`` response.""" + code = http_client.MOVED_PERMANENTLY + + +class NotModified(Redirection): + """Exception mapping a ``304 Not Modified`` response.""" + code = http_client.NOT_MODIFIED + + +class TemporaryRedirect(Redirection): + """Exception mapping a ``307 Temporary Redirect`` response.""" + code = http_client.TEMPORARY_REDIRECT + + +class ResumeIncomplete(Redirection): + """Exception mapping a ``308 Resume Incomplete`` response. + + .. note:: :ref:`http_client.PERMANENT_REDIRECT` is ``308``, but Google APIs + differ in their use of this status code. 
+ """ + code = 308 + + +class ClientError(GoogleAPICallError): + """Base class for all client error (HTTP 4xx) responses.""" + + +class BadRequest(ClientError): + """Exception mapping a ``400 Bad Request`` response.""" + code = http_client.BAD_REQUEST + + +class InvalidArgument(BadRequest): + """Exception mapping a :prop:`grpc.StatusCode.INVALID_ARGUMENT` error.""" + grpc_status_code = ( + grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None) + + +class FailedPrecondition(BadRequest): + """Exception mapping a :prop:`grpc.StatusCode.FAILED_PRECONDITION` + error.""" + grpc_status_code = ( + grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None) + + +class OutOfRange(BadRequest): + """Exception mapping a :prop:`grpc.StatusCode.OUT_OF_RANGE` error.""" + grpc_status_code = ( + grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None) + + +class Unauthorized(ClientError): + """Exception mapping a ``401 Unauthorized`` response.""" + code = http_client.UNAUTHORIZED + + +class Unauthenticated(Unauthorized): + """Exception mapping a :prop:`grpc.StatusCode.UNAUTHENTICATED` error.""" + grpc_status_code = ( + grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None) + + +class Forbidden(ClientError): + """Exception mapping a ``403 Forbidden`` response.""" + code = http_client.FORBIDDEN + + +class PermissionDenied(Forbidden): + """Exception mapping a :prop:`grpc.StatusCode.PERMISSION_DENIED` error.""" + grpc_status_code = ( + grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None) + + +class NotFound(ClientError): + """Exception mapping a ``404 Not Found`` response or a + :prop:`grpc.StatusCode.NOT_FOUND` error.""" + code = http_client.NOT_FOUND + grpc_status_code = ( + grpc.StatusCode.NOT_FOUND if grpc is not None else None) + + +class MethodNotAllowed(ClientError): + """Exception mapping a ``405 Method Not Allowed`` response.""" + code = http_client.METHOD_NOT_ALLOWED + + +class Conflict(ClientError): + """Exception mapping a ``409 Conflict`` response.""" + code = http_client.CONFLICT + + +class AlreadyExists(Conflict): + """Exception mapping a :prop:`grpc.StatusCode.ALREADY_EXISTS` error.""" + grpc_status_code = ( + grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None) + + +class Aborted(Conflict): + """Exception mapping a :prop:`grpc.StatusCode.ABORTED` error.""" + grpc_status_code = ( + grpc.StatusCode.ABORTED if grpc is not None else None) + + +class LengthRequired(ClientError): + """Exception mapping a ``411 Length Required`` response.""" + code = http_client.LENGTH_REQUIRED + + +class PreconditionFailed(ClientError): + """Exception mapping a ``412 Precondition Failed`` response.""" + code = http_client.PRECONDITION_FAILED + + +class RequestRangeNotSatisfiable(ClientError): + """Exception mapping a ``416 Request Range Not Satisfiable`` response.""" + code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE + + +class TooManyRequests(ClientError): + """Exception mapping a ``429 Too Many Requests`` response.""" + # http_client does not define a constant for this in Python 2. + code = 429 + + +class ResourceExhausted(TooManyRequests): + """Exception mapping a :prop:`grpc.StatusCode.RESOURCE_EXHAUSTED` error.""" + grpc_status_code = ( + grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None) + + +class Cancelled(ClientError): + """Exception mapping a :prop:`grpc.StatusCode.CANCELLED` error.""" + # This maps to HTTP status code 499. 
See + # https://github.com/googleapis/googleapis/blob/master/google/rpc\ + # /code.proto + code = 499 + grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None + + +class ServerError(GoogleAPICallError): + """Base for 5xx responses.""" + + +class InternalServerError(ServerError): + """Exception mapping a ``500 Internal Server Error`` response. or a + :prop:`grpc.StatusCode.INTERNAL` error.""" + code = http_client.INTERNAL_SERVER_ERROR + grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None + + +class Unknown(ServerError): + """Exception mapping a :prop:`grpc.StatusCode.UNKNOWN` error.""" + grpc_status_code = grpc.StatusCode.UNKNOWN if grpc is not None else None + + +class DataLoss(ServerError): + """Exception mapping a :prop:`grpc.StatusCode.DATA_LOSS` error.""" + grpc_status_code = grpc.StatusCode.DATA_LOSS if grpc is not None else None + + +class MethodNotImplemented(ServerError): + """Exception mapping a ``501 Not Implemented`` response or a + :prop:`grpc.StatusCode.UNIMPLEMENTED` error.""" + code = http_client.NOT_IMPLEMENTED + grpc_status_code = ( + grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None) + + +class BadGateway(ServerError): + """Exception mapping a ``502 Bad Gateway`` response.""" + code = http_client.BAD_GATEWAY + + +class ServiceUnavailable(ServerError): + """Exception mapping a ``503 Service Unavailable`` response or a + :prop:`grpc.StatusCode.UNAVAILABLE` error.""" + code = http_client.SERVICE_UNAVAILABLE + grpc_status_code = ( + grpc.StatusCode.UNAVAILABLE if grpc is not None else None) + + +class GatewayTimeout(ServerError): + """Exception mapping a ``504 Gateway Timeout`` response.""" + code = http_client.GATEWAY_TIMEOUT + + +class DeadlineExceeded(GatewayTimeout): + """Exception mapping a :prop:`grpc.StatusCode.DEADLINE_EXCEEDED` error.""" + grpc_status_code = ( + grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None) + + +def exception_class_for_http_status(status_code): + """Return the exception class for a specific HTTP status code. + + Args: + status_code (int): The HTTP status code. + + Returns: + type: the appropriate subclass of :class:`GoogleAPICallError`. + """ + return _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError) + + +def from_http_status(status_code, message, **kwargs): + """Create a :class:`GoogleAPICallError` from an HTTP status code. + + Args: + status_code (int): The HTTP status code. + message (str): The exception message. + kwargs: Additional arguments passed to the :class:`GoogleAPICallError` + constructor. + + Returns: + GoogleAPICallError: An instance of the appropriate subclass of + :class:`GoogleAPICallError`. + """ + error_class = exception_class_for_http_status(status_code) + error = error_class(message, **kwargs) + + if error.code is None: + error.code = status_code + + return error + + +def from_http_response(response): + """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`. + + Args: + response (requests.Response): The HTTP response. + + Returns: + GoogleAPICallError: An instance of the appropriate subclass of + :class:`GoogleAPICallError`, with the message and errors populated + from the response. 
+ """ + try: + payload = response.json() + except ValueError: + payload = {'error': {'message': response.text or 'unknown error'}} + + error_message = payload.get('error', {}).get('message', 'unknown error') + errors = payload.get('error', {}).get('errors', ()) + + message = '{method} {url}: {error}'.format( + method=response.request.method, + url=response.request.url, + error=error_message) + + exception = from_http_status( + response.status_code, message, errors=errors, response=response) + return exception + + +def exception_class_for_grpc_status(status_code): + """Return the exception class for a specific :class:`grpc.StatusCode`. + + Args: + status_code (grpc.StatusCode): The gRPC status code. + + Returns: + type: the appropriate subclass of :class:`GoogleAPICallError`. + """ + return _GRPC_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError) + + +def from_grpc_status(status_code, message, **kwargs): + """Create a :class:`GoogleAPICallError` from a :class:`grpc.StatusCode`. + + Args: + status_code (grpc.StatusCode): The gRPC status code. + message (str): The exception message. + kwargs: Additional arguments passed to the :class:`GoogleAPICallError` + constructor. + + Returns: + GoogleAPICallError: An instance of the appropriate subclass of + :class:`GoogleAPICallError`. + """ + error_class = exception_class_for_grpc_status(status_code) + error = error_class(message, **kwargs) + + if error.grpc_status_code is None: + error.grpc_status_code = status_code + + return error + + +def from_grpc_error(rpc_exc): + """Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`. + + Args: + rpc_exc (grpc.RpcError): The gRPC error. + + Returns: + GoogleAPICallError: An instance of the appropriate subclass of + :class:`GoogleAPICallError`. + """ + if isinstance(rpc_exc, grpc.Call): + return from_grpc_status( + rpc_exc.code(), + rpc_exc.details(), + errors=(rpc_exc,), + response=rpc_exc) + else: + return GoogleAPICallError( + str(rpc_exc), errors=(rpc_exc,), response=rpc_exc) diff --git a/packages/google-cloud-core/google/cloud/exceptions.py b/packages/google-cloud-core/google/cloud/exceptions.py index 2e7eca3be98d..a5d82be30452 100644 --- a/packages/google-cloud-core/google/cloud/exceptions.py +++ b/packages/google-cloud-core/google/cloud/exceptions.py @@ -12,240 +12,48 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Custom exceptions for :mod:`google.cloud` package. +# pylint: disable=invalid-name +# pylint recognizies all of these aliases as constants and thinks they have +# invalid names. -See https://cloud.google.com/storage/docs/json_api/v1/status-codes -""" +"""Custom exceptions for :mod:`google.cloud` package.""" # Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import -import copy - -import six - -from google.cloud._helpers import _to_bytes +from google.api.core import exceptions try: from grpc._channel import _Rendezvous except ImportError: # pragma: NO COVER _Rendezvous = None -_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module - - -# pylint: disable=invalid-name GrpcRendezvous = _Rendezvous """Exception class raised by gRPC stable.""" -# pylint: enable=invalid-name - - -class GoogleCloudError(Exception): - """Base error class for Google Cloud errors (abstract). - - Each subclass represents a single type of HTTP error response. - """ - code = None - """HTTP status code. Concrete subclasses *must* define. 
- - See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html - """ - - def __init__(self, message, errors=()): - super(GoogleCloudError, self).__init__(message) - self.message = message - self._errors = errors - - def __str__(self): - result = u'%d %s' % (self.code, self.message) - if six.PY2: - result = _to_bytes(result, 'utf-8') - return result - - @property - def errors(self): - """Detailed error information. - - :rtype: list(dict) - :returns: a list of mappings describing each error. - """ - return [copy.deepcopy(error) for error in self._errors] - - -class Redirection(GoogleCloudError): - """Base for 3xx responses - - This class is abstract. - """ - - -class MovedPermanently(Redirection): - """Exception mapping a '301 Moved Permanently' response.""" - code = 301 - - -class NotModified(Redirection): - """Exception mapping a '304 Not Modified' response.""" - code = 304 - - -class TemporaryRedirect(Redirection): - """Exception mapping a '307 Temporary Redirect' response.""" - code = 307 - - -class ResumeIncomplete(Redirection): - """Exception mapping a '308 Resume Incomplete' response.""" - code = 308 - - -class ClientError(GoogleCloudError): - """Base for 4xx responses - - This class is abstract - """ - - -class BadRequest(ClientError): - """Exception mapping a '400 Bad Request' response.""" - code = 400 - - -class Unauthorized(ClientError): - """Exception mapping a '401 Unauthorized' response.""" - code = 401 - - -class Forbidden(ClientError): - """Exception mapping a '403 Forbidden' response.""" - code = 403 - - -class NotFound(ClientError): - """Exception mapping a '404 Not Found' response.""" - code = 404 - - -class MethodNotAllowed(ClientError): - """Exception mapping a '405 Method Not Allowed' response.""" - code = 405 - - -class Conflict(ClientError): - """Exception mapping a '409 Conflict' response.""" - code = 409 - - -class LengthRequired(ClientError): - """Exception mapping a '411 Length Required' response.""" - code = 411 - - -class PreconditionFailed(ClientError): - """Exception mapping a '412 Precondition Failed' response.""" - code = 412 - - -class RequestRangeNotSatisfiable(ClientError): - """Exception mapping a '416 Request Range Not Satisfiable' response.""" - code = 416 - - -class TooManyRequests(ClientError): - """Exception mapping a '429 Too Many Requests' response.""" - code = 429 - - -class ServerError(GoogleCloudError): - """Base for 5xx responses: (abstract)""" - - -class InternalServerError(ServerError): - """Exception mapping a '500 Internal Server Error' response.""" - code = 500 - - -class MethodNotImplemented(ServerError): - """Exception mapping a '501 Not Implemented' response.""" - code = 501 - - -class BadGateway(ServerError): - """Exception mapping a '502 Bad Gateway' response.""" - code = 502 - - -class ServiceUnavailable(ServerError): - """Exception mapping a '503 Service Unavailable' response.""" - code = 503 - - -class GatewayTimeout(ServerError): - """Exception mapping a `504 Gateway Timeout'` response.""" - code = 504 - - -def from_http_status(status_code, message, errors=()): - """Create a :class:`GoogleCloudError` from an HTTP status code. - - Args: - status_code (int): The HTTP status code. - message (str): The exception message. - errors (Sequence[Any]): A list of additional error information. - - Returns: - GoogleCloudError: An instance of the appropriate subclass of - :class:`GoogleCloudError`. 
- """ - error_class = _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleCloudError) - error = error_class(message, errors) - - if error.code is None: - error.code = status_code - - return error - - -def from_http_response(response): - """Create a :class:`GoogleCloudError` from a :class:`requests.Response`. - - Args: - response (requests.Response): The HTTP response. - - Returns: - GoogleCloudError: An instance of the appropriate subclass of - :class:`GoogleCloudError`, with the message and errors populated - from the response. - """ - try: - payload = response.json() - except ValueError: - payload = {'error': {'message': response.text or 'unknown error'}} - - error_message = payload.get('error', {}).get('message', 'unknown error') - errors = payload.get('error', {}).get('errors', ()) - - message = '{method} {url}: {error}'.format( - method=response.request.method, - url=response.request.url, - error=error_message) - - exception = from_http_status( - response.status_code, message, errors=errors) - exception.response = response - return exception - - -def _walk_subclasses(klass): - """Recursively walk subclass tree.""" - for sub in klass.__subclasses__(): - yield sub - for subsub in _walk_subclasses(sub): - yield subsub - -# Build the code->exception class mapping. -for _eklass in _walk_subclasses(GoogleCloudError): - code = getattr(_eklass, 'code', None) - if code is not None: - _HTTP_CODE_TO_EXCEPTION[code] = _eklass +# Aliases to moved classes. +GoogleCloudError = exceptions.GoogleAPICallError +Redirection = exceptions.Redirection +MovedPermanently = exceptions.MovedPermanently +NotModified = exceptions.NotModified +TemporaryRedirect = exceptions.TemporaryRedirect +ResumeIncomplete = exceptions.ResumeIncomplete +ClientError = exceptions.ClientError +BadRequest = exceptions.BadRequest +Unauthorized = exceptions.Unauthorized +Forbidden = exceptions.Forbidden +NotFound = exceptions.NotFound +MethodNotAllowed = exceptions.MethodNotAllowed +Conflict = exceptions.Conflict +LengthRequired = exceptions.LengthRequired +PreconditionFailed = exceptions.PreconditionFailed +RequestRangeNotSatisfiable = exceptions.RequestRangeNotSatisfiable +TooManyRequests = exceptions.TooManyRequests +ServerError = exceptions.ServerError +InternalServerError = exceptions.InternalServerError +MethodNotImplemented = exceptions.MethodNotImplemented +BadGateway = exceptions.BadGateway +ServiceUnavailable = exceptions.ServiceUnavailable +GatewayTimeout = exceptions.GatewayTimeout +from_http_status = exceptions.from_http_status +from_http_response = exceptions.from_http_response diff --git a/packages/google-cloud-core/google/cloud/obselete.py b/packages/google-cloud-core/google/cloud/obselete.py index 9af28cd85d52..cd70025946f7 100644 --- a/packages/google-cloud-core/google/cloud/obselete.py +++ b/packages/google-cloud-core/google/cloud/obselete.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+"""Helpers for deprecated code and modules.""" + import warnings import pkg_resources diff --git a/packages/google-cloud-core/nox.py b/packages/google-cloud-core/nox.py index 1dca10eb9b69..b795ddfce7a6 100644 --- a/packages/google-cloud-core/nox.py +++ b/packages/google-cloud-core/nox.py @@ -43,6 +43,7 @@ def unit_tests(session, python_version): 'py.test', '--quiet', '--cov=google.cloud', + '--cov=google.api.core', '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc', diff --git a/packages/google-cloud-core/tests/unit/api_core/__init__.py b/packages/google-cloud-core/tests/unit/api_core/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-core/tests/unit/api_core/test_exceptions.py b/packages/google-cloud-core/tests/unit/api_core/test_exceptions.py new file mode 100644 index 000000000000..f29873e7b3d8 --- /dev/null +++ b/packages/google-cloud-core/tests/unit/api_core/test_exceptions.py @@ -0,0 +1,201 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +import grpc +import mock +import requests +from six.moves import http_client + +from google.api.core import exceptions + + +def test_create_google_cloud_error(): + exception = exceptions.GoogleAPICallError('Testing') + exception.code = 600 + assert str(exception) == '600 Testing' + assert exception.message == 'Testing' + assert exception.errors == [] + assert exception.response is None + + +def test_create_google_cloud_error_with_args(): + error = { + 'domain': 'global', + 'location': 'test', + 'locationType': 'testing', + 'message': 'Testing', + 'reason': 'test', + } + response = mock.sentinel.response + exception = exceptions.GoogleAPICallError( + 'Testing', [error], response=response) + exception.code = 600 + assert str(exception) == '600 Testing' + assert exception.message == 'Testing' + assert exception.errors == [error] + assert exception.response == response + + +def test_from_http_status(): + message = 'message' + exception = exceptions.from_http_status(http_client.NOT_FOUND, message) + assert exception.code == http_client.NOT_FOUND + assert exception.message == message + assert exception.errors == [] + + +def test_from_http_status_with_errors_and_response(): + message = 'message' + errors = ['1', '2'] + response = mock.sentinel.response + exception = exceptions.from_http_status( + http_client.NOT_FOUND, message, errors=errors, response=response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == message + assert exception.errors == errors + assert exception.response == response + + +def test_from_http_status_unknown_code(): + message = 'message' + status_code = 156 + exception = exceptions.from_http_status(status_code, message) + assert exception.code == status_code + assert exception.message == message + + +def make_response(content): + response = requests.Response() + response._content = content + response.status_code = http_client.NOT_FOUND + 
response.request = requests.Request( + method='POST', url='https://example.com').prepare() + return response + + +def test_from_http_response_no_content(): + response = make_response(None) + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: unknown error' + assert exception.response == response + + +def test_from_http_response_text_content(): + response = make_response(b'message') + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: message' + + +def test_from_http_response_json_content(): + response = make_response(json.dumps({ + 'error': { + 'message': 'json message', + 'errors': ['1', '2'] + } + }).encode('utf-8')) + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: json message' + assert exception.errors == ['1', '2'] + + +def test_from_http_response_bad_json_content(): + response = make_response(json.dumps({'meep': 'moop'}).encode('utf-8')) + + exception = exceptions.from_http_response(response) + + assert isinstance(exception, exceptions.NotFound) + assert exception.code == http_client.NOT_FOUND + assert exception.message == 'POST https://example.com/: unknown error' + + +def test_from_grpc_status(): + message = 'message' + exception = exceptions.from_grpc_status( + grpc.StatusCode.OUT_OF_RANGE, message) + assert isinstance(exception, exceptions.BadRequest) + assert isinstance(exception, exceptions.OutOfRange) + assert exception.code == http_client.BAD_REQUEST + assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE + assert exception.message == message + assert exception.errors == [] + + +def test_from_grpc_status_with_errors_and_response(): + message = 'message' + response = mock.sentinel.response + errors = ['1', '2'] + exception = exceptions.from_grpc_status( + grpc.StatusCode.OUT_OF_RANGE, message, + errors=errors, response=response) + + assert isinstance(exception, exceptions.OutOfRange) + assert exception.message == message + assert exception.errors == errors + assert exception.response == response + + +def test_from_grpc_status_unknown_code(): + message = 'message' + exception = exceptions.from_grpc_status( + grpc.StatusCode.OK, message) + assert exception.grpc_status_code == grpc.StatusCode.OK + assert exception.message == message + + +def test_from_grpc_error(): + message = 'message' + error = mock.create_autospec(grpc.Call, instance=True) + error.code.return_value = grpc.StatusCode.INVALID_ARGUMENT + error.details.return_value = message + + exception = exceptions.from_grpc_error(error) + + assert isinstance(exception, exceptions.BadRequest) + assert isinstance(exception, exceptions.InvalidArgument) + assert exception.code == http_client.BAD_REQUEST + assert exception.grpc_status_code == grpc.StatusCode.INVALID_ARGUMENT + assert exception.message == message + assert exception.errors == [error] + assert exception.response == error + + +def test_from_grpc_error_non_call(): + message = 'message' + error = mock.create_autospec(grpc.RpcError, instance=True) + error.__str__.return_value = message + + exception = exceptions.from_grpc_error(error) + + assert isinstance(exception, 
exceptions.GoogleAPICallError) + assert exception.code is None + assert exception.grpc_status_code is None + assert exception.message == message + assert exception.errors == [error] + assert exception.response == error From 72aa7e8724e117b446469edc2dd6e6bf737b5979 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 8 Aug 2017 14:03:04 -0700 Subject: [PATCH 175/468] Move google.cloud.future to google.api.core (#3764) --- .../{cloud => api/core}/future/__init__.py | 2 +- .../{cloud => api/core}/future/_helpers.py | 0 .../google/{cloud => api/core}/future/base.py | 0 .../{cloud => api/core}/future/polling.py | 4 +-- .../{cloud/future => api/core}/operation.py | 35 +++++++++++++++---- .../unit/{ => api_core}/future/__init__.py | 0 .../{ => api_core}/future/test__helpers.py | 2 +- .../{ => api_core}/future/test_polling.py | 2 +- .../{future => api_core}/test_operation.py | 2 +- 9 files changed, 35 insertions(+), 12 deletions(-) rename packages/google-cloud-core/google/{cloud => api/core}/future/__init__.py (93%) rename packages/google-cloud-core/google/{cloud => api/core}/future/_helpers.py (100%) rename packages/google-cloud-core/google/{cloud => api/core}/future/base.py (100%) rename packages/google-cloud-core/google/{cloud => api/core}/future/polling.py (98%) rename packages/google-cloud-core/google/{cloud/future => api/core}/operation.py (91%) rename packages/google-cloud-core/tests/unit/{ => api_core}/future/__init__.py (100%) rename packages/google-cloud-core/tests/unit/{ => api_core}/future/test__helpers.py (96%) rename packages/google-cloud-core/tests/unit/{ => api_core}/future/test_polling.py (98%) rename packages/google-cloud-core/tests/unit/{future => api_core}/test_operation.py (99%) diff --git a/packages/google-cloud-core/google/cloud/future/__init__.py b/packages/google-cloud-core/google/api/core/future/__init__.py similarity index 93% rename from packages/google-cloud-core/google/cloud/future/__init__.py rename to packages/google-cloud-core/google/api/core/future/__init__.py index e5cf2b20ce7e..a61510d307e6 100644 --- a/packages/google-cloud-core/google/cloud/future/__init__.py +++ b/packages/google-cloud-core/google/api/core/future/__init__.py @@ -14,7 +14,7 @@ """Futures for dealing with asynchronous operations.""" -from google.cloud.future.base import Future +from google.api.core.future.base import Future __all__ = [ 'Future', diff --git a/packages/google-cloud-core/google/cloud/future/_helpers.py b/packages/google-cloud-core/google/api/core/future/_helpers.py similarity index 100% rename from packages/google-cloud-core/google/cloud/future/_helpers.py rename to packages/google-cloud-core/google/api/core/future/_helpers.py diff --git a/packages/google-cloud-core/google/cloud/future/base.py b/packages/google-cloud-core/google/api/core/future/base.py similarity index 100% rename from packages/google-cloud-core/google/cloud/future/base.py rename to packages/google-cloud-core/google/api/core/future/base.py diff --git a/packages/google-cloud-core/google/cloud/future/polling.py b/packages/google-cloud-core/google/api/core/future/polling.py similarity index 98% rename from packages/google-cloud-core/google/cloud/future/polling.py rename to packages/google-cloud-core/google/api/core/future/polling.py index 6b7ae4221f64..40380d6ad938 100644 --- a/packages/google-cloud-core/google/cloud/future/polling.py +++ b/packages/google-cloud-core/google/api/core/future/polling.py @@ -22,8 +22,8 @@ import six import tenacity -from google.cloud.future import _helpers -from 
google.cloud.future import base +from google.api.core.future import _helpers +from google.api.core.future import base class PollingFuture(base.Future): diff --git a/packages/google-cloud-core/google/cloud/future/operation.py b/packages/google-cloud-core/google/api/core/operation.py similarity index 91% rename from packages/google-cloud-core/google/cloud/future/operation.py rename to packages/google-cloud-core/google/api/core/operation.py index ec430cd9c55b..1cc44f0b3d7b 100644 --- a/packages/google-cloud-core/google/cloud/future/operation.py +++ b/packages/google-cloud-core/google/api/core/operation.py @@ -12,14 +12,36 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Futures for long-running operations returned from Google Cloud APIs.""" +"""Futures for long-running operations returned from Google Cloud APIs. + +These futures can be used to synchronously wait for the result of a +long-running operation using :meth:`Operation.result`: + + +.. code-block:: python + + operation = my_api_client.long_running_method() + result = operation.result() + +Or asynchronously using callbacks and :meth:`Operation.add_done_callback`: + +.. code-block:: python + + operation = my_api_client.long_running_method() + + def my_callback(future): + result = future.result() + + operation.add_done_callback(my_callback) + +""" import functools import threading +from google.api.core import exceptions +from google.api.core.future import polling from google.cloud import _helpers -from google.cloud import exceptions -from google.cloud.future import polling from google.longrunning import operations_pb2 from google.protobuf import json_format from google.rpc import code_pb2 @@ -85,12 +107,13 @@ def _set_result_from_operation(self): self._result_type, self._operation.response) self.set_result(response) elif self._operation.HasField('error'): - exception = exceptions.GoogleCloudError( + exception = exceptions.GoogleAPICallError( self._operation.error.message, - errors=(self._operation.error)) + errors=(self._operation.error), + response=self._operation) self.set_exception(exception) else: - exception = exceptions.GoogleCloudError( + exception = exceptions.GoogleAPICallError( 'Unexpected state: Long-running operation had neither ' 'response nor error set.') self.set_exception(exception) diff --git a/packages/google-cloud-core/tests/unit/future/__init__.py b/packages/google-cloud-core/tests/unit/api_core/future/__init__.py similarity index 100% rename from packages/google-cloud-core/tests/unit/future/__init__.py rename to packages/google-cloud-core/tests/unit/api_core/future/__init__.py diff --git a/packages/google-cloud-core/tests/unit/future/test__helpers.py b/packages/google-cloud-core/tests/unit/api_core/future/test__helpers.py similarity index 96% rename from packages/google-cloud-core/tests/unit/future/test__helpers.py rename to packages/google-cloud-core/tests/unit/api_core/future/test__helpers.py index cbca5ba4d4df..534dd3696cb9 100644 --- a/packages/google-cloud-core/tests/unit/future/test__helpers.py +++ b/packages/google-cloud-core/tests/unit/api_core/future/test__helpers.py @@ -14,7 +14,7 @@ import mock -from google.cloud.future import _helpers +from google.api.core.future import _helpers @mock.patch('threading.Thread', autospec=True) diff --git a/packages/google-cloud-core/tests/unit/future/test_polling.py b/packages/google-cloud-core/tests/unit/api_core/future/test_polling.py similarity index 98% rename from 
packages/google-cloud-core/tests/unit/future/test_polling.py rename to packages/google-cloud-core/tests/unit/api_core/future/test_polling.py index c8fde1c20385..a359ba1a2152 100644 --- a/packages/google-cloud-core/tests/unit/future/test_polling.py +++ b/packages/google-cloud-core/tests/unit/api_core/future/test_polling.py @@ -19,7 +19,7 @@ import mock import pytest -from google.cloud.future import polling +from google.api.core.future import polling class PollingFutureImpl(polling.PollingFuture): diff --git a/packages/google-cloud-core/tests/unit/future/test_operation.py b/packages/google-cloud-core/tests/unit/api_core/test_operation.py similarity index 99% rename from packages/google-cloud-core/tests/unit/future/test_operation.py rename to packages/google-cloud-core/tests/unit/api_core/test_operation.py index 2d281694001a..2332c50fdf4b 100644 --- a/packages/google-cloud-core/tests/unit/future/test_operation.py +++ b/packages/google-cloud-core/tests/unit/api_core/test_operation.py @@ -15,7 +15,7 @@ import mock -from google.cloud.future import operation +from google.api.core import operation from google.longrunning import operations_pb2 from google.protobuf import struct_pb2 from google.rpc import code_pb2 From 020c8a470cd1bf71b553e2c6d5976d725499f5c1 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 8 Aug 2017 14:50:31 -0700 Subject: [PATCH 176/468] Use latest/ directory for docs instead of stable/ (#3766) See also https://github.com/GoogleCloudPlatform/google-cloud-python/pull/3763 $ sed -i '' 's/googlecloudplatform.github.io\/google-cloud-python\/stable\//googlecloudplatform.github.io\/google-cloud-python\/latest\//g' **/*.rst --- packages/google-cloud-core/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-core/README.rst b/packages/google-cloud-core/README.rst index 0685a028dbb1..878e9a5363e0 100644 --- a/packages/google-cloud-core/README.rst +++ b/packages/google-cloud-core/README.rst @@ -9,7 +9,7 @@ common helpers (e.g. base ``Client`` classes) used by all of the - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/core/modules.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/core/modules.html Quick Start ----------- From d6fc4c28a303ec2319d35056256874d6c863655e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 9 Aug 2017 10:02:05 -0700 Subject: [PATCH 177/468] Move google.cloud.iterator to google.api.core.page_iterator (#3770) * Move google.cloud.iterator to google.api.core.page_iterator * Re-write tests to pytest style. * Make GAXIterator private- it will soon be removed. 
* Pass api_request into HTTPIterator to avoid accessing private members * BigQuery: use google.api.core.page_iterator * DNS: use google.api.core.page_iterator * Logging: use google.api.core.page_iterator * PubSub: use google.api.core.page_iterator * Resource manager: use google.api.core.page_iterator * Runtimeconfig: use google.api.core.page_iterator * logging: use google.api.core._GAXIterator * Storage: use google.api.core.page_iterator * Pubsub: use google.api.core._GAXIterator * Trace: use google.api.core._GAXIterator * Spanner: use google.api.core._GAXIterator --- .../iterator.py => api/core/page_iterator.py} | 326 +++++----- .../tests/unit/api_core/test_page_iterator.py | 461 +++++++++++++ .../tests/unit/test_iterator.py | 605 ------------------ 3 files changed, 610 insertions(+), 782 deletions(-) rename packages/google-cloud-core/google/{cloud/iterator.py => api/core/page_iterator.py} (52%) create mode 100644 packages/google-cloud-core/tests/unit/api_core/test_page_iterator.py delete mode 100644 packages/google-cloud-core/tests/unit/test_iterator.py diff --git a/packages/google-cloud-core/google/cloud/iterator.py b/packages/google-cloud-core/google/api/core/page_iterator.py similarity index 52% rename from packages/google-cloud-core/google/cloud/iterator.py rename to packages/google-cloud-core/google/api/core/page_iterator.py index 742443ddc5f9..147c9f47e35a 100644 --- a/packages/google-cloud-core/google/cloud/iterator.py +++ b/packages/google-cloud-core/google/api/core/page_iterator.py @@ -12,56 +12,49 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Iterators for paging through API responses. +"""Iterators for paging through paged API methods. These iterators simplify the process of paging through API responses -where the response is a list of results with a ``nextPageToken``. - -To make an iterator work, you'll need to provide a way to convert a JSON -item returned from the API into the object of your choice (via -``item_to_value``). You also may need to specify a custom ``items_key`` so -that a given response (containing a page of results) can be parsed into an -iterable page of the actual objects you want. You then can use this to get -**all** the results from a resource:: - - >>> def item_to_value(iterator, item): - ... my_item = MyItemClass(iterator.client, other_arg=True) - ... my_item._set_properties(item) - ... return my_item - ... - >>> iterator = Iterator(..., items_key='blocks', - ... item_to_value=item_to_value) - >>> list(iterator) # Convert to a list (consumes all values). +where the request takes a page token and the response is a list of results with +a token for the next page. See `list pagination`_ in the Google API Style Guide +for more details. + +.. _list pagination: + https://cloud.google.com/apis/design/design_patterns#list_pagination + +API clients that have methods that follow the list pagination pattern can +return an :class:`Iterator`. You can use this iterator to get **all** of +the results across all pages:: + + >>> results_iterator = client.list_resources() + >>> list(results_iterator) # Convert to a list (consumes all values). Or you can walk your way through items and call off the search early if -you find what you're looking for (resulting in possibly fewer -requests):: +you find what you're looking for (resulting in possibly fewer requests):: - >>> for my_item in Iterator(...): - ... print(my_item.name) - ... if not my_item.is_valid: + >>> for resource in results_iterator: + ... 
print(resource.name) + ... if not resource.is_valid: ... break At any point, you may check the number of items consumed by referencing the ``num_results`` property of the iterator:: - >>> my_iterator = Iterator(...) - >>> for my_item in my_iterator: - ... if my_iterator.num_results >= 10: + >>> for my_item in results_iterator: + ... if results_iterator.num_results >= 10: ... break When iterating, not every new item will send a request to the server. To iterate based on each page of items (where a page corresponds to a request):: - >>> iterator = Iterator(...) - >>> for page in iterator.pages: + >>> for page in results_iterator.pages: ... print('=' * 20) - ... print(' Page number: %d' % (iterator.page_number,)) - ... print(' Items in page: %d' % (page.num_items,)) - ... print(' First item: %r' % (next(page),)) - ... print('Items remaining: %d' % (page.remaining,)) - ... print('Next page token: %s' % (iterator.next_page_token,)) + ... print(' Page number: {:d}'.format(iterator.page_number)) + ... print(' Items in page: {:d}'.format(page.num_items)) + ... print(' First item: {!r}'.format(next(page))) + ... print('Items remaining: {:d}'.format(page.remaining)) + ... print('Next page token: {}'.format(iterator.next_page_token)) ==================== Page number: 1 Items in page: 1 @@ -75,7 +68,8 @@ Items remaining: 18 Next page token: None -To consume an entire page:: +Then, for each page you can get all the resources on that page by iterating +through it or using :func:`list`:: >>> list(page) [ @@ -85,47 +79,21 @@ ] """ +import abc import six -DEFAULT_ITEMS_KEY = 'items' -"""The dictionary key used to retrieve items from each response.""" - - -# pylint: disable=unused-argument -def _do_nothing_page_start(iterator, page, response): - """Helper to provide custom behavior after a :class:`Page` is started. - - This is a do-nothing stand-in as the default value. - - :type iterator: :class:`Iterator` - :param iterator: An iterator that holds some request info. - - :type page: :class:`Page` - :param page: The page that was just created. - - :type response: dict - :param response: The JSON API response for a page. - """ -# pylint: enable=unused-argument - - class Page(object): """Single page of results in an iterator. - :type parent: :class:`Iterator` - :param parent: The iterator that owns the current page. - - :type items: iterable - :param items: An iterable (that also defines __len__) of items - from a raw API response. - - :type item_to_value: callable - :param item_to_value: Callable to convert an item from the type in the - raw API response into the native object. - Assumed signature takes an :class:`Iterator` and a - raw API response with a single item. + Args: + parent (Iterator): The iterator that owns the current page. + items (Sequence[Any]): An iterable (that also defines __len__) of items + from a raw API response. + item_to_value (Callable[Iterator, Any]): Callable to convert an item + from the type in the raw API response into the native object. Will + be called with the iterator and a single item. """ def __init__(self, parent, items, item_to_value): @@ -137,24 +105,16 @@ def __init__(self, parent, items, item_to_value): @property def num_items(self): - """Total items in the page. - - :rtype: int - :returns: The number of items in this page. - """ + """int: Total items in the page.""" return self._num_items @property def remaining(self): - """Remaining items in the page. - - :rtype: int - :returns: The number of items remaining in this page. 
- """ + """int: Remaining items in the page.""" return self._remaining def __iter__(self): - """The :class:`Page` is an iterator.""" + """The :class:`Page` is an iterator of items.""" return self def next(self): @@ -170,26 +130,28 @@ def next(self): __next__ = next -class Iterator(object): - """A generic class for iterating through API list responses. - - :type client: :class:`~google.cloud.client.Client` - :param client: The client used to identify the application. +def _item_to_value_identity(iterator, item): + """An item to value transformer that returns the item un-changed.""" + # pylint: disable=unused-argument + # We are conforming to the interface defined by Iterator. + return item - :type item_to_value: callable - :param item_to_value: Callable to convert an item from the type in the - raw API response into the native object. - Assumed signature takes an :class:`Iterator` and a - raw API response with a single item. - :type page_token: str - :param page_token: (Optional) A token identifying a page in a result set. +@six.add_metaclass(abc.ABCMeta) +class Iterator(object): + """A generic class for iterating through API list responses. - :type max_results: int - :param max_results: (Optional) The maximum number of results to fetch. + Args: + client(google.cloud.client.Client): The API client. + item_to_value (Callable[Iterator, Any]): Callable to convert an item + from the type in the raw API response into the native object. Will + be called with the iterator and a single item. + page_token (str): A token identifying a page in a result set to start + fetching results from. + max_results (int): The maximum number of results to fetch. """ - def __init__(self, client, item_to_value, + def __init__(self, client, item_to_value=_item_to_value_identity, page_token=None, max_results=None): self._started = False self.client = client @@ -204,9 +166,11 @@ def __init__(self, client, item_to_value, def pages(self): """Iterator of pages in the response. - :rtype: :class:`~types.GeneratorType` - :returns: A generator of :class:`Page` instances. - :raises ValueError: If the iterator has already been started. + returns: + types.GeneratorType[Page]: A generator of :class:`Page` instances. + + raises: + ValueError: If the iterator has already been started. """ if self._started: raise ValueError('Iterator has already started', self) @@ -223,9 +187,11 @@ def _items_iter(self): def __iter__(self): """Iterator for each item returned. - :rtype: :class:`~types.GeneratorType` - :returns: A generator of items from the API. - :raises ValueError: If the iterator has already been started. + Returns: + types.GeneratorType[Any]: A generator of items from the API. + + Raises: + ValueError: If the iterator has already been started. """ if self._started: raise ValueError('Iterator has already started', self) @@ -235,15 +201,14 @@ def __iter__(self): def _page_iter(self, increment): """Generator of pages of API responses. - :type increment: bool - :param increment: Flag indicating if the total number of results - should be incremented on each page. This is useful - since a page iterator will want to increment by - results per page while an items iterator will want - to increment per item. + Args: + increment (bool): Flag indicating if the total number of results + should be incremented on each page. This is useful since a page + iterator will want to increment by results per page while an + items iterator will want to increment per item. 
- :rtype: :class:`Page` - :returns: pages + Yields: + Page: each page of items from the API. """ page = self._next_page() while page is not None: @@ -253,70 +218,82 @@ def _page_iter(self, increment): yield page page = self._next_page() - @staticmethod - def _next_page(): + @abc.abstractmethod + def _next_page(self): """Get the next page in the iterator. This does nothing and is intended to be over-ridden by subclasses to return the next :class:`Page`. - :raises NotImplementedError: Always. + Raises: + NotImplementedError: Always, this method is abstract. """ raise NotImplementedError -class HTTPIterator(Iterator): - """A generic class for iterating through Cloud JSON APIs list responses. - - :type client: :class:`~google.cloud.client.Client` - :param client: The client used to identify the application. - - :type path: str - :param path: The path to query for the list of items. - - :type item_to_value: callable - :param item_to_value: Callable to convert an item from JSON - into the native object. Assumed signature - takes an :class:`Iterator` and a dictionary - holding a single item. - - :type items_key: str - :param items_key: (Optional) The key used to grab retrieved items from an - API response. Defaults to :data:`DEFAULT_ITEMS_KEY`. +def _do_nothing_page_start(iterator, page, response): + """Helper to provide custom behavior after a :class:`Page` is started. - :type page_token: str - :param page_token: (Optional) A token identifying a page in a result set. + This is a do-nothing stand-in as the default value. - :type max_results: int - :param max_results: (Optional) The maximum number of results to fetch. + Args: + iterator (Iterator): An iterator that holds some request info. + page (Page): The page that was just created. + response (Any): The API response for a page. + """ + # pylint: disable=unused-argument + pass - :type extra_params: dict - :param extra_params: (Optional) Extra query string parameters for the - API call. - :type page_start: callable - :param page_start: (Optional) Callable to provide any special behavior - after a new page has been created. Assumed signature - takes the :class:`Iterator` that started the page, - the :class:`Page` that was started and the dictionary - containing the page response. +class HTTPIterator(Iterator): + """A generic class for iterating through HTTP/JSON API list responses. + + To make an iterator work, you'll need to provide a way to convert a JSON + item returned from the API into the object of your choice (via + ``item_to_value``). You also may need to specify a custom ``items_key`` so + that a given response (containing a page of results) can be parsed into an + iterable page of the actual objects you want. + + Args: + client (google.cloud.client.Client): The API client. + api_request (Callable): The function to use to make API requests. + Generally, this will be + :meth:`google.cloud._http.JSONConnection.api_request`. + path (str): The method path to query for the list of items. + item_to_value (Callable[Iterator, Any]): Callable to convert an item + from the type in the JSON response into a native object. Will + be called with the iterator and a single item. + items_key (str): The key in the API response where the list of items + can be found. + page_token (str): A token identifying a page in a result set to start + fetching results from. + max_results (int): The maximum number of results to fetch. + extra_params (dict): Extra query string parameters for the + API call. 
+ page_start (Callable[Iterator, Page, dict]): Callable to provide any + special behavior after a new page has been created. Assumed + signature takes the :class:`Iterator` that started the page, + the :class:`Page` that was started and the dictionary containing + the page response. .. autoattribute:: pages """ + _DEFAULT_ITEMS_KEY = 'items' _PAGE_TOKEN = 'pageToken' _MAX_RESULTS = 'maxResults' _NEXT_TOKEN = 'nextPageToken' _RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS]) _HTTP_METHOD = 'GET' - def __init__(self, client, path, item_to_value, - items_key=DEFAULT_ITEMS_KEY, + def __init__(self, client, api_request, path, item_to_value, + items_key=_DEFAULT_ITEMS_KEY, page_token=None, max_results=None, extra_params=None, page_start=_do_nothing_page_start): super(HTTPIterator, self).__init__( client, item_to_value, page_token=page_token, max_results=max_results) + self.api_request = api_request self.path = path self._items_key = items_key self.extra_params = extra_params @@ -329,7 +306,8 @@ def __init__(self, client, path, item_to_value, def _verify_params(self): """Verifies the parameters don't use any reserved parameter. - :raises ValueError: If a reserved parameter is used. + Raises: + ValueError: If a reserved parameter is used. """ reserved_in_use = self._RESERVED_PARAMS.intersection( self.extra_params) @@ -340,9 +318,9 @@ def _verify_params(self): def _next_page(self): """Get the next page in the iterator. - :rtype: :class:`Page` - :returns: The next page in the iterator (or :data:`None` if - there are no pages left). + Returns: + Optional[Page]: The next page in the iterator or :data:`None` if + there are no pages left. """ if self._has_next_page(): response = self._get_next_page_response() @@ -357,8 +335,8 @@ def _next_page(self): def _has_next_page(self): """Determines whether or not there are more pages with results. - :rtype: bool - :returns: Whether the iterator has more pages. + Returns: + bool: Whether the iterator has more pages. """ if self.page_number == 0: return True @@ -372,8 +350,8 @@ def _has_next_page(self): def _get_query_params(self): """Getter for query parameters for the next request. - :rtype: dict - :returns: A dictionary of query parameters. + Returns: + dict: A dictionary of query parameters. """ result = {} if self.next_page_token is not None: @@ -386,19 +364,20 @@ def _get_query_params(self): def _get_next_page_response(self): """Requests the next page from the path provided. - :rtype: dict - :returns: The parsed JSON response of the next page's contents. + Returns: + dict: The parsed JSON response of the next page's contents. - :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. + Raises: + ValueError: If the HTTP method is not ``GET`` or ``POST``. """ params = self._get_query_params() if self._HTTP_METHOD == 'GET': - return self.client._connection.api_request( + return self.api_request( method=self._HTTP_METHOD, path=self.path, query_params=params) elif self._HTTP_METHOD == 'POST': - return self.client._connection.api_request( + return self.api_request( method=self._HTTP_METHOD, path=self.path, data=params) @@ -406,30 +385,23 @@ def _get_next_page_response(self): raise ValueError('Unexpected HTTP method', self._HTTP_METHOD) -class GAXIterator(Iterator): +class _GAXIterator(Iterator): """A generic class for iterating through Cloud gRPC APIs list responses. - :type client: :class:`~google.cloud.client.Client` - :param client: The client used to identify the application. 
- - :type page_iter: :class:`~google.gax.PageIterator` - :param page_iter: A GAX page iterator to be wrapped and conform to the - :class:`~google.cloud.iterator.Iterator` surface. - - :type item_to_value: callable - :param item_to_value: Callable to convert an item from a protobuf - into the native object. Assumed signature - takes an :class:`Iterator` and a single item - from the API response as a protobuf. - - :type max_results: int - :param max_results: (Optional) The maximum number of results to fetch. + Any: + client (google.cloud.client.Client): The API client. + page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped + to conform to the :class:`Iterator` interface. + item_to_value (Callable[Iterator, Any]): Callable to convert an item + from the the protobuf response into a native object. Will + be called with the iterator and a single item. + max_results (int): The maximum number of results to fetch. .. autoattribute:: pages """ def __init__(self, client, page_iter, item_to_value, max_results=None): - super(GAXIterator, self).__init__( + super(_GAXIterator, self).__init__( client, item_to_value, page_token=page_iter.page_token, max_results=max_results) self._gax_page_iter = page_iter @@ -440,9 +412,9 @@ def _next_page(self): Wraps the response from the :class:`~google.gax.PageIterator` in a :class:`Page` instance and captures some state at each page. - :rtype: :class:`Page` - :returns: The next page in the iterator (or :data:`None` if - there are no pages left). + Returns: + Optional[Page]: The next page in the iterator or :data:`None` if + there are no pages left. """ try: items = six.next(self._gax_page_iter) diff --git a/packages/google-cloud-core/tests/unit/api_core/test_page_iterator.py b/packages/google-cloud-core/tests/unit/api_core/test_page_iterator.py new file mode 100644 index 000000000000..82466579e37b --- /dev/null +++ b/packages/google-cloud-core/tests/unit/api_core/test_page_iterator.py @@ -0,0 +1,461 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
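
For reference, a small usage sketch of the new ``HTTPIterator`` signature (the path, response data, and transform are made up for illustration), showing ``api_request`` being injected explicitly instead of the iterator reaching into ``client._connection``::

    from google.api.core import page_iterator

    def fake_api_request(method, path, query_params=None, data=None):
        # Stand-in for google.cloud._http.JSONConnection.api_request;
        # a real client would issue an HTTP request here.
        return {'items': [{'name': 'one'}, {'name': 'two'}]}

    iterator = page_iterator.HTTPIterator(
        client=None,                    # a real google.cloud Client in practice
        api_request=fake_api_request,
        path='/widgets',                # hypothetical list endpoint
        item_to_value=lambda it, item: item['name'])

    assert list(iterator) == ['one', 'two']
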
+ +import types + +import mock +import pytest +import six + +from google.api.core import page_iterator + + +def test__do_nothing_page_start(): + assert page_iterator._do_nothing_page_start(None, None, None) is None + + +class TestPage(object): + + def test_constructor(self): + parent = mock.sentinel.parent + item_to_value = mock.sentinel.item_to_value + + page = page_iterator.Page(parent, (1, 2, 3), item_to_value) + + assert page.num_items == 3 + assert page.remaining == 3 + assert page._parent is parent + assert page._item_to_value is item_to_value + + def test___iter__(self): + page = page_iterator.Page(None, (), None) + assert iter(page) is page + + def test_iterator_calls_parent_item_to_value(self): + parent = mock.sentinel.parent + + item_to_value = mock.Mock( + side_effect=lambda iterator, value: value, spec=['__call__']) + + page = page_iterator.Page(parent, (10, 11, 12), item_to_value) + page._remaining = 100 + + assert item_to_value.call_count == 0 + assert page.remaining == 100 + + assert six.next(page) == 10 + assert item_to_value.call_count == 1 + item_to_value.assert_called_with(parent, 10) + assert page.remaining == 99 + + assert six.next(page) == 11 + assert item_to_value.call_count == 2 + item_to_value.assert_called_with(parent, 11) + assert page.remaining == 98 + + assert six.next(page) == 12 + assert item_to_value.call_count == 3 + item_to_value.assert_called_with(parent, 12) + assert page.remaining == 97 + + +class PageIteratorImpl(page_iterator.Iterator): + def _next_page(self): + return mock.create_autospec(page_iterator.Page, instance=True) + + +class TestIterator(object): + + def test_constructor(self): + client = mock.sentinel.client + item_to_value = mock.sentinel.item_to_value + token = 'ab13nceor03' + max_results = 1337 + + iterator = PageIteratorImpl( + client, item_to_value, page_token=token, max_results=max_results) + + assert not iterator._started + assert iterator.client is client + assert iterator._item_to_value == item_to_value + assert iterator.max_results == max_results + # Changing attributes. + assert iterator.page_number == 0 + assert iterator.next_page_token == token + assert iterator.num_results == 0 + + def test_pages_property_starts(self): + iterator = PageIteratorImpl(None, None) + + assert not iterator._started + + assert isinstance(iterator.pages, types.GeneratorType) + + assert iterator._started + + def test_pages_property_restart(self): + iterator = PageIteratorImpl(None, None) + + assert iterator.pages + + # Make sure we cannot restart. + with pytest.raises(ValueError): + assert iterator.pages + + def test__page_iter_increment(self): + iterator = PageIteratorImpl(None, None) + page = page_iterator.Page( + iterator, ('item',), page_iterator._item_to_value_identity) + iterator._next_page = mock.Mock(side_effect=[page, None]) + + assert iterator.num_results == 0 + + page_iter = iterator._page_iter(increment=True) + next(page_iter) + + assert iterator.num_results == 1 + + def test__page_iter_no_increment(self): + iterator = PageIteratorImpl(None, None) + + assert iterator.num_results == 0 + + page_iter = iterator._page_iter(increment=False) + next(page_iter) + + # results should still be 0 after fetching a page. + assert iterator.num_results == 0 + + def test__items_iter(self): + # Items to be returned. 
+ item1 = 17 + item2 = 100 + item3 = 211 + + # Make pages from mock responses + parent = mock.sentinel.parent + page1 = page_iterator.Page( + parent, (item1, item2), page_iterator._item_to_value_identity) + page2 = page_iterator.Page( + parent, (item3,), page_iterator._item_to_value_identity) + + iterator = PageIteratorImpl(None, None) + iterator._next_page = mock.Mock(side_effect=[page1, page2, None]) + + items_iter = iterator._items_iter() + + assert isinstance(items_iter, types.GeneratorType) + + # Consume items and check the state of the iterator. + assert iterator.num_results == 0 + + assert six.next(items_iter) == item1 + assert iterator.num_results == 1 + + assert six.next(items_iter) == item2 + assert iterator.num_results == 2 + + assert six.next(items_iter) == item3 + assert iterator.num_results == 3 + + with pytest.raises(StopIteration): + six.next(items_iter) + + def test___iter__(self): + iterator = PageIteratorImpl(None, None) + iterator._next_page = mock.Mock(side_effect=[(1, 2), (3,), None]) + + assert not iterator._started + + result = list(iterator) + + assert result == [1, 2, 3] + assert iterator._started + + def test___iter__restart(self): + iterator = PageIteratorImpl(None, None) + + iter(iterator) + + # Make sure we cannot restart. + with pytest.raises(ValueError): + iter(iterator) + + def test___iter___restart_after_page(self): + iterator = PageIteratorImpl(None, None) + + assert iterator.pages + + # Make sure we cannot restart after starting the page iterator + with pytest.raises(ValueError): + iter(iterator) + + +class TestHTTPIterator(object): + + def test_constructor(self): + client = mock.sentinel.client + path = '/foo' + iterator = page_iterator.HTTPIterator( + client, mock.sentinel.api_request, + path, mock.sentinel.item_to_value) + + assert not iterator._started + assert iterator.client is client + assert iterator.path == path + assert iterator._item_to_value is mock.sentinel.item_to_value + assert iterator._items_key == 'items' + assert iterator.max_results is None + assert iterator.extra_params == {} + assert iterator._page_start == page_iterator._do_nothing_page_start + # Changing attributes. 
+ assert iterator.page_number == 0 + assert iterator.next_page_token is None + assert iterator.num_results == 0 + + def test_constructor_w_extra_param_collision(self): + extra_params = {'pageToken': 'val'} + + with pytest.raises(ValueError): + page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value, + extra_params=extra_params) + + def test_iterate(self): + path = '/foo' + item1 = {'name': '1'} + item2 = {'name': '2'} + api_request = mock.Mock(return_value={'items': [item1, item2]}) + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, api_request, path=path, + item_to_value=page_iterator._item_to_value_identity) + + assert iterator.num_results == 0 + + items_iter = iter(iterator) + + val1 = six.next(items_iter) + assert val1 == item1 + assert iterator.num_results == 1 + + val2 = six.next(items_iter) + assert val2 == item2 + assert iterator.num_results == 2 + + with pytest.raises(StopIteration): + six.next(items_iter) + + api_request.assert_called_once_with( + method='GET', path=path, query_params={}) + + def test__has_next_page_new(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value) + + # The iterator should *always* indicate that it has a next page + # when created so that it can fetch the initial page. + assert iterator._has_next_page() + + def test__has_next_page_without_token(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value) + + iterator.page_number = 1 + + # The iterator should not indicate that it has a new page if the + # initial page has been requested and there's no page token. + assert not iterator._has_next_page() + + def test__has_next_page_w_number_w_token(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value) + + iterator.page_number = 1 + iterator.next_page_token = mock.sentinel.token + + # The iterator should indicate that it has a new page if the + # initial page has been requested and there's is a page token. + assert iterator._has_next_page() + + def test__has_next_page_w_max_results_not_done(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value, + max_results=3, + page_token=mock.sentinel.token) + + iterator.page_number = 1 + + # The iterator should indicate that it has a new page if there + # is a page token and it has not consumed more than max_results. + assert iterator.num_results < iterator.max_results + assert iterator._has_next_page() + + def test__has_next_page_w_max_results_done(self): + + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value, + max_results=3, + page_token=mock.sentinel.token) + + iterator.page_number = 1 + iterator.num_results = 3 + + # The iterator should not indicate that it has a new page if there + # if it has consumed more than max_results. 
+ assert iterator.num_results == iterator.max_results + assert not iterator._has_next_page() + + def test__get_query_params_no_token(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value) + + assert iterator._get_query_params() == {} + + def test__get_query_params_w_token(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value) + iterator.next_page_token = 'token' + + assert iterator._get_query_params() == { + 'pageToken': iterator.next_page_token} + + def test__get_query_params_w_max_results(self): + max_results = 3 + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value, + max_results=max_results) + + iterator.num_results = 1 + local_max = max_results - iterator.num_results + + assert iterator._get_query_params() == { + 'maxResults': local_max} + + def test__get_query_params_extra_params(self): + extra_params = {'key': 'val'} + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value, + extra_params=extra_params) + + assert iterator._get_query_params() == extra_params + + def test__get_next_page_response_with_post(self): + path = '/foo' + page_response = {'items': ['one', 'two']} + api_request = mock.Mock(return_value=page_response) + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, api_request, path=path, + item_to_value=page_iterator._item_to_value_identity) + iterator._HTTP_METHOD = 'POST' + + response = iterator._get_next_page_response() + + assert response == page_response + + api_request.assert_called_once_with( + method='POST', path=path, data={}) + + def test__get_next_page_bad_http_method(self): + iterator = page_iterator.HTTPIterator( + mock.sentinel.client, + mock.sentinel.api_request, + mock.sentinel.path, + mock.sentinel.item_to_value) + iterator._HTTP_METHOD = 'NOT-A-VERB' + + with pytest.raises(ValueError): + iterator._get_next_page_response() + + +class GAXPageIterator(object): + """Fake object that matches gax.PageIterator""" + def __init__(self, pages, page_token=None): + self._pages = iter(pages) + self.page_token = page_token + + def next(self): + return six.next(self._pages) + + __next__ = next + + +class TestGAXIterator(object): + + def test_constructor(self): + client = mock.sentinel.client + token = 'zzzyy78kl' + page_iter = GAXPageIterator((), page_token=token) + item_to_value = page_iterator._item_to_value_identity + max_results = 1337 + iterator = page_iterator._GAXIterator( + client, page_iter, item_to_value, max_results=max_results) + + assert not iterator._started + assert iterator.client is client + assert iterator._item_to_value is item_to_value + assert iterator.max_results == max_results + assert iterator._gax_page_iter is page_iter + # Changing attributes. 
+ assert iterator.page_number == 0 + assert iterator.next_page_token == token + assert iterator.num_results == 0 + + def test__next_page(self): + page_items = (29, 31) + page_token = '2sde98ds2s0hh' + page_iter = GAXPageIterator([page_items], page_token=page_token) + iterator = page_iterator._GAXIterator( + mock.sentinel.client, + page_iter, + page_iterator._item_to_value_identity) + + page = iterator._next_page() + + assert iterator.next_page_token == page_token + assert isinstance(page, page_iterator.Page) + assert list(page) == list(page_items) + + next_page = iterator._next_page() + + assert next_page is None diff --git a/packages/google-cloud-core/tests/unit/test_iterator.py b/packages/google-cloud-core/tests/unit/test_iterator.py deleted file mode 100644 index a7d9e4f0924d..000000000000 --- a/packages/google-cloud-core/tests/unit/test_iterator.py +++ /dev/null @@ -1,605 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test__do_nothing_page_start(unittest.TestCase): - - def _call_fut(self, iterator, page, response): - from google.cloud.iterator import _do_nothing_page_start - - return _do_nothing_page_start(iterator, page, response) - - def test_do_nothing(self): - result = self._call_fut(None, None, None) - self.assertIsNone(result) - - -class TestPage(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.iterator import Page - - return Page - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor(self): - parent = object() - item_to_value = object() - page = self._make_one(parent, (1, 2, 3), item_to_value) - self.assertIs(page._parent, parent) - self.assertEqual(page._num_items, 3) - self.assertEqual(page._remaining, 3) - self.assertIs(page._item_to_value, item_to_value) - - def test_num_items_property(self): - page = self._make_one(None, (), None) - num_items = 42 - page._num_items = num_items - self.assertEqual(page.num_items, num_items) - - def test_remaining_property(self): - page = self._make_one(None, (), None) - remaining = 1337 - page._remaining = remaining - self.assertEqual(page.remaining, remaining) - - def test___iter__(self): - page = self._make_one(None, (), None) - self.assertIs(iter(page), page) - - def test_iterator_calls__item_to_value(self): - import six - - class Parent(object): - - calls = 0 - - def item_to_value(self, item): - self.calls += 1 - return item - - parent = Parent() - page = self._make_one(parent, (10, 11, 12), - Parent.item_to_value) - page._remaining = 100 - - self.assertEqual(parent.calls, 0) - self.assertEqual(page.remaining, 100) - self.assertEqual(six.next(page), 10) - self.assertEqual(parent.calls, 1) - self.assertEqual(page.remaining, 99) - self.assertEqual(six.next(page), 11) - self.assertEqual(parent.calls, 2) - self.assertEqual(page.remaining, 98) - self.assertEqual(six.next(page), 12) - self.assertEqual(parent.calls, 3) - self.assertEqual(page.remaining, 97) - - -class 
TestIterator(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.iterator import Iterator - - return Iterator - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor(self): - connection = _Connection() - client = _Client(connection) - item_to_value = object() - token = 'ab13nceor03' - max_results = 1337 - iterator = self._make_one(client, item_to_value, page_token=token, - max_results=max_results) - - self.assertFalse(iterator._started) - self.assertIs(iterator.client, client) - self.assertIs(iterator._item_to_value, item_to_value) - self.assertEqual(iterator.max_results, max_results) - # Changing attributes. - self.assertEqual(iterator.page_number, 0) - self.assertEqual(iterator.next_page_token, token) - self.assertEqual(iterator.num_results, 0) - - def test_pages_property(self): - iterator = self._make_one(None, None) - self.assertFalse(iterator._started) - mock_iter = object() - incremented = [] - - def page_iter(increment): - incremented.append(increment) - return mock_iter - - iterator._page_iter = page_iter - self.assertIs(iterator.pages, mock_iter) - self.assertEqual(incremented, [True]) - # Check the side-effect. - self.assertTrue(iterator._started) - - def test_pages_property_started(self): - import types - - iterator = self._make_one(None, None) - self.assertIsInstance(iterator.pages, types.GeneratorType) - # Make sure we cannot restart. - with self.assertRaises(ValueError): - getattr(iterator, 'pages') - - def test_pages_property_items_started(self): - import types - - iterator = self._make_one(None, None) - self.assertIsInstance(iter(iterator), types.GeneratorType) - with self.assertRaises(ValueError): - getattr(iterator, 'pages') - - @staticmethod - def _do_nothing(parent, value): - return parent, value - - def test__items_iter(self): - import types - import six - from google.cloud.iterator import Page - - # Items to be returned. - item1 = 17 - item2 = 100 - item3 = 211 - - # Make pages from mock responses - parent = object() - page1 = Page(parent, (item1, item2), self._do_nothing) - page2 = Page(parent, (item3,), self._do_nothing) - - iterator = self._make_one(None, None) - # Fake the page iterator on the object. - incremented = [] - - def page_iter(increment): - incremented.append(increment) - return iter((page1, page2)) - - iterator._page_iter = page_iter - items_iter = iterator._items_iter() - # Make sure it is a generator. - self.assertIsInstance(items_iter, types.GeneratorType) - - # Consume items and check the state of the iterator. - self.assertEqual(iterator.num_results, 0) - self.assertEqual(six.next(items_iter), (parent, item1)) - self.assertEqual(iterator.num_results, 1) - self.assertEqual(six.next(items_iter), (parent, item2)) - self.assertEqual(iterator.num_results, 2) - self.assertEqual(six.next(items_iter), (parent, item3)) - self.assertEqual(iterator.num_results, 3) - with self.assertRaises(StopIteration): - six.next(items_iter) - - # Make sure our page_iter() was called correctly. - self.assertEqual(incremented, [False]) - - def test___iter__(self): - iterator = self._make_one(None, None) - self.assertFalse(iterator._started) - incremented = [] - - def page_iter(increment): - incremented.append(increment) - return iter(()) - - iterator._page_iter = page_iter - self.assertEqual(list(iterator), []) - # Check the side-effect. 
- self.assertTrue(iterator._started) - - def test___iter___started(self): - import types - - iterator = self._make_one(None, None) - self.assertIsInstance(iter(iterator), types.GeneratorType) - with self.assertRaises(ValueError): - iter(iterator) - - def test___iter___pages_started(self): - import types - - iterator = self._make_one(None, None) - self.assertIsInstance(iterator.pages, types.GeneratorType) - with self.assertRaises(ValueError): - iter(iterator) - - def test__next_page_virtual(self): - iterator = self._make_one(None, None) - with self.assertRaises(NotImplementedError): - iterator._next_page() - - -class TestHTTPIterator(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.iterator import HTTPIterator - - return HTTPIterator - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor(self): - from google.cloud.iterator import _do_nothing_page_start - - connection = _Connection() - client = _Client(connection) - path = '/foo' - iterator = self._make_one(client, path, None) - self.assertFalse(iterator._started) - self.assertIs(iterator.client, client) - self.assertEqual(iterator.path, path) - self.assertIsNone(iterator._item_to_value) - self.assertEqual(iterator._items_key, 'items') - self.assertIsNone(iterator.max_results) - self.assertEqual(iterator.extra_params, {}) - self.assertIs(iterator._page_start, _do_nothing_page_start) - # Changing attributes. - self.assertEqual(iterator.page_number, 0) - self.assertIsNone(iterator.next_page_token) - self.assertEqual(iterator.num_results, 0) - - def test_constructor_w_extra_param_collision(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - extra_params = {'pageToken': 'val'} - with self.assertRaises(ValueError): - self._make_one(client, path, None, extra_params=extra_params) - - def test_pages_iter_empty_then_another(self): - import six - from google.cloud._testing import _Monkey - from google.cloud import iterator as MUT - - items_key = 'its-key' - iterator = self._make_one(None, None, None, items_key=items_key) - # Fake the next page class. 
- fake_page = MUT.Page(None, (), None) - page_args = [] - - def dummy_response(): - return {} - - def dummy_page_class(*args): - page_args.append(args) - return fake_page - - iterator._get_next_page_response = dummy_response - pages_iter = iterator.pages - with _Monkey(MUT, Page=dummy_page_class): - page = six.next(pages_iter) - self.assertIs(page, fake_page) - self.assertEqual( - page_args, [(iterator, (), iterator._item_to_value)]) - - def test_iterate(self): - import six - - path = '/foo' - key1 = 'key1' - key2 = 'key2' - item1, item2 = object(), object() - ITEMS = {key1: item1, key2: item2} - - def item_to_value(iterator, item): # pylint: disable=unused-argument - return ITEMS[item['name']] - - connection = _Connection( - {'items': [{'name': key1}, {'name': key2}]}) - client = _Client(connection) - iterator = self._make_one(client, path=path, - item_to_value=item_to_value) - self.assertEqual(iterator.num_results, 0) - - items_iter = iter(iterator) - val1 = six.next(items_iter) - self.assertEqual(val1, item1) - self.assertEqual(iterator.num_results, 1) - - val2 = six.next(items_iter) - self.assertEqual(val2, item2) - self.assertEqual(iterator.num_results, 2) - - with self.assertRaises(StopIteration): - six.next(items_iter) - - kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], path) - self.assertEqual(kw['query_params'], {}) - - def test__has_next_page_new(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - iterator = self._make_one(client, path, None) - self.assertTrue(iterator._has_next_page()) - - def test__has_next_page_w_number_no_token(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - iterator = self._make_one(client, path, None) - iterator.page_number = 1 - self.assertFalse(iterator._has_next_page()) - - def test__has_next_page_w_number_w_token(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - token = 'token' - iterator = self._make_one(client, path, None) - iterator.page_number = 1 - iterator.next_page_token = token - self.assertTrue(iterator._has_next_page()) - - def test__has_next_page_w_max_results_not_done(self): - iterator = self._make_one(None, None, None, max_results=3, - page_token='definitely-not-none') - iterator.page_number = 1 - self.assertLess(iterator.num_results, iterator.max_results) - self.assertTrue(iterator._has_next_page()) - - def test__has_next_page_w_max_results_done(self): - iterator = self._make_one(None, None, None, max_results=3) - iterator.page_number = 1 - iterator.num_results = iterator.max_results - self.assertFalse(iterator._has_next_page()) - - def test__get_query_params_no_token(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - iterator = self._make_one(client, path, None) - self.assertEqual(iterator._get_query_params(), {}) - - def test__get_query_params_w_token(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - token = 'token' - iterator = self._make_one(client, path, None) - iterator.next_page_token = token - self.assertEqual(iterator._get_query_params(), - {'pageToken': token}) - - def test__get_query_params_w_max_results(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - max_results = 3 - iterator = self._make_one(client, path, None, - max_results=max_results) - iterator.num_results = 1 - local_max = max_results - iterator.num_results - self.assertEqual(iterator._get_query_params(), - 
{'maxResults': local_max}) - - def test__get_query_params_extra_params(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - extra_params = {'key': 'val'} - iterator = self._make_one(client, path, None, - extra_params=extra_params) - self.assertEqual(iterator._get_query_params(), extra_params) - - def test__get_query_params_w_token_and_extra_params(self): - connection = _Connection() - client = _Client(connection) - path = '/foo' - token = 'token' - extra_params = {'key': 'val'} - iterator = self._make_one(client, path, None, - extra_params=extra_params) - iterator.next_page_token = token - - expected_query = extra_params.copy() - expected_query.update({'pageToken': token}) - self.assertEqual(iterator._get_query_params(), expected_query) - - def test__get_next_page_response_new_no_token_in_response(self): - path = '/foo' - token = 'token' - key1 = 'key1' - key2 = 'key2' - connection = _Connection({'items': [{'name': key1}, {'name': key2}], - 'nextPageToken': token}) - client = _Client(connection) - iterator = self._make_one(client, path, None) - response = iterator._get_next_page_response() - self.assertEqual(response['items'], [{'name': key1}, {'name': key2}]) - kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], path) - self.assertEqual(kw['query_params'], {}) - - def test__get_next_page_response_with_post(self): - path = '/foo' - returned = {'green': 'eggs', 'ham': 55} - connection = _Connection(returned) - client = _Client(connection) - iterator = self._make_one(client, path, None) - iterator._HTTP_METHOD = 'POST' - response = iterator._get_next_page_response() - self.assertEqual(response, returned) - - self.assertEqual(len(connection._requested), 1) - called_kwargs = connection._requested[0] - self.assertEqual(called_kwargs, { - 'method': iterator._HTTP_METHOD, - 'path': path, - 'data': {}, - }) - - def test__get_next_page_bad_http_method(self): - path = '/foo' - client = _Client(None) - iterator = self._make_one(client, path, None) - iterator._HTTP_METHOD = 'NOT-A-VERB' - with self.assertRaises(ValueError): - iterator._get_next_page_response() - - -class TestGAXIterator(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.iterator import GAXIterator - - return GAXIterator - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_constructor(self): - client = _Client(None) - token = 'zzzyy78kl' - page_iter = SimpleIter(token) - item_to_value = object() - max_results = 1337 - iterator = self._make_one(client, page_iter, item_to_value, - max_results=max_results) - - self.assertFalse(iterator._started) - self.assertIs(iterator.client, client) - self.assertIs(iterator._item_to_value, item_to_value) - self.assertEqual(iterator.max_results, max_results) - self.assertIs(iterator._gax_page_iter, page_iter) - # Changing attributes. - self.assertEqual(iterator.page_number, 0) - self.assertEqual(iterator.next_page_token, token) - self.assertEqual(iterator.num_results, 0) - - @staticmethod - def _do_nothing(parent, value): - return parent, value - - def test__next_page(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.iterator import Page - - # Make a mock ``google.gax.PageIterator`` - page_items = (29, 31) # Items for just one page. - page_token = '2sde98ds2s0hh' - page_iter = _GAXPageIterator(page_items, page_token=page_token) - # Wrap the GAX iterator. 
- iterator = self._make_one(None, page_iter, self._do_nothing) - - page = iterator._next_page() - # First check the page token. - self.assertEqual(iterator.next_page_token, page_token) - # Then check the page. - self.assertIsInstance(page, Page) - # _do_nothing will throw the iterator in front. - expected = zip((iterator, iterator), page_items) - self.assertEqual(list(page), list(expected)) - - def test__next_page_empty(self): - from google.cloud._testing import _GAXPageIterator - - # Make a mock ``google.gax.PageIterator`` - page_iter = _GAXPageIterator() - # Wrap the GAX iterator. - iterator = self._make_one(None, page_iter, self._do_nothing) - - page = iterator._next_page() - self.assertIsNone(page) - self.assertIsNone(iterator.next_page_token) - - def test_iterate(self): - import six - from google.cloud._testing import _GAXPageIterator - - item1 = object() - item2 = object() - item3 = object() - token1 = 'smkdme30e2e32r' - token2 = '39cm9csl123dck' - - # Make a mock ``google.gax.PageIterator`` - page1 = (item1,) - page2 = (item2, item3) - page_iter = _GAXPageIterator(page1, page2, page_token=token1) - iterator = self._make_one(None, page_iter, self._do_nothing) - - self.assertEqual(iterator.num_results, 0) - - items_iter = iter(iterator) - val1 = six.next(items_iter) - self.assertEqual(val1, (iterator, item1)) - self.assertEqual(iterator.num_results, 1) - self.assertEqual(iterator.next_page_token, token1) - - # Before grabbing the next page, hot-swap the token - # on the ``page_iter``. - page_iter.page_token = token2 - - # Grab the next item (which will cause the next page). - val2 = six.next(items_iter) - self.assertEqual(val2, (iterator, item2)) - self.assertEqual(iterator.num_results, 2) - self.assertEqual(iterator.next_page_token, token2) - - # Grab the final item from the final / current page. - val3 = six.next(items_iter) - self.assertEqual(val3, (iterator, item3)) - self.assertEqual(iterator.num_results, 3) - # Make sure the token did not change. 
- self.assertEqual(iterator.next_page_token, token2) - - with self.assertRaises(StopIteration): - six.next(items_iter) - - -class _Connection(object): - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - - def api_request(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - -class _Client(object): - - def __init__(self, connection): - self._connection = connection - - -class SimpleIter(object): - - def __init__(self, page_token=None): - self.page_token = page_token From bf66cb6ea0b53d3743373bf1a6cc7a9f8f610359 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 16 Aug 2017 11:09:12 -0700 Subject: [PATCH 178/468] Add google.api.core.retry with base retry functionality (#3819) Add google.api.core.retry with base retry functionality Additionally: * Add google.api.core.exceptions.RetryError * Add google.api.core.helpers package * Add google.api.core.helpers.datetime_helpers module --- .../google/api/core/exceptions.py | 23 +++ .../google/api/core/helpers/__init__.py | 0 .../api/core/helpers/datetime_helpers.py | 22 +++ .../google/api/core/retry.py | 148 ++++++++++++++++++ .../tests/unit/api_core/helpers/__init__.py | 0 .../api_core/helpers/test_datetime_helpers.py | 22 +++ .../tests/unit/api_core/test_retry.py | 129 +++++++++++++++ 7 files changed, 344 insertions(+) create mode 100644 packages/google-cloud-core/google/api/core/helpers/__init__.py create mode 100644 packages/google-cloud-core/google/api/core/helpers/datetime_helpers.py create mode 100644 packages/google-cloud-core/google/api/core/retry.py create mode 100644 packages/google-cloud-core/tests/unit/api_core/helpers/__init__.py create mode 100644 packages/google-cloud-core/tests/unit/api_core/helpers/test_datetime_helpers.py create mode 100644 packages/google-cloud-core/tests/unit/api_core/test_retry.py diff --git a/packages/google-cloud-core/google/api/core/exceptions.py b/packages/google-cloud-core/google/api/core/exceptions.py index c25816abce34..38e30718fe83 100644 --- a/packages/google-cloud-core/google/api/core/exceptions.py +++ b/packages/google-cloud-core/google/api/core/exceptions.py @@ -40,6 +40,29 @@ class GoogleAPIError(Exception): pass +@six.python_2_unicode_compatible +class RetryError(GoogleAPIError): + """Raised when a function has exhausted all of its available retries. + + Args: + message (str): The exception message. + cause (Exception): The last exception raised when retring the + function. 
+ """ + def __init__(self, message, cause): + super(RetryError, self).__init__(message) + self.message = message + self._cause = cause + + @property + def cause(self): + """The last exception raised when retrying the function.""" + return self._cause + + def __str__(self): + return '{}, last exception: {}'.format(self.message, self.cause) + + class _GoogleAPICallErrorMeta(type): """Metaclass for registering GoogleAPICallError subclasses.""" def __new__(mcs, name, bases, class_dict): diff --git a/packages/google-cloud-core/google/api/core/helpers/__init__.py b/packages/google-cloud-core/google/api/core/helpers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-core/google/api/core/helpers/datetime_helpers.py b/packages/google-cloud-core/google/api/core/helpers/datetime_helpers.py new file mode 100644 index 000000000000..cfc817bc16fa --- /dev/null +++ b/packages/google-cloud-core/google/api/core/helpers/datetime_helpers.py @@ -0,0 +1,22 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for :mod:`datetime`.""" + +import datetime + + +def utcnow(): + """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests.""" + return datetime.datetime.utcnow() diff --git a/packages/google-cloud-core/google/api/core/retry.py b/packages/google-cloud-core/google/api/core/retry.py new file mode 100644 index 000000000000..b5a550faa584 --- /dev/null +++ b/packages/google-cloud-core/google/api/core/retry.py @@ -0,0 +1,148 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for retrying functions with exponential back-off.""" + +import datetime +import logging +import random +import time + +import six + +from google.api.core import exceptions +from google.api.core.helpers import datetime_helpers + +_LOGGER = logging.getLogger(__name__) +_DEFAULT_MAX_JITTER = 0.2 + + +def if_exception_type(*exception_types): + """Creates a predicate to check if the exception is of a given type. + + Args: + exception_types (Sequence[type]): The exception types to check for. + + Returns: + Callable[Exception]: A predicate that returns True if the provided + exception is of the given type(s). + """ + def inner(exception): + """Bound predicate for checking an exception type.""" + return isinstance(exception, exception_types) + return inner + + +# pylint: disable=invalid-name +# Pylint sees this as a constant, but it is also an alias that should be +# considered a function. 
+if_transient_error = if_exception_type(( + exceptions.InternalServerError, + exceptions.TooManyRequests)) +"""A predicate that checks if an exception is a transient API error. + +The following server errors are considered transient: + +- :class:`google.api.core.exceptions.InternalServerError` - HTTP 500, gRPC + ``INTERNAL(13)`` and its subclasses. +- :class:`google.api.core.exceptions.TooManyRequests` - HTTP 429 +- :class:`google.api.core.exceptions.ResourceExhausted` - gRPC + ``RESOURCE_EXHAUSTED(8)`` +""" +# pylint: enable=invalid-name + + +def exponential_sleep_generator( + initial, maximum, multiplier=2, jitter=_DEFAULT_MAX_JITTER): + """Generates sleep intervals based on the exponential back-off algorithm. + + This implements the `Truncated Exponential Back-off`_ algorithm. + + .. _Truncated Exponential Back-off: + https://cloud.google.com/storage/docs/exponential-backoff + + Args: + initial (float): The minimum about of time to delay. This must + be greater than 0. + maximum (float): The maximum about of time to delay. + multiplier (float): The multiplier applied to the delay. + jitter (float): The maximum about of randomness to apply to the delay. + + Yields: + float: successive sleep intervals. + """ + delay = initial + while True: + yield delay + delay = min( + delay * multiplier + random.uniform(0, jitter), maximum) + + +def retry_target(target, predicate, sleep_generator, deadline): + """Call a function and retry if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target(Callable): The function to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator (Iterator[float]): An infinite iterator that determines + how long to sleep between retries. + deadline (float): How long to keep retrying the target. + + Returns: + Any: the return value of the target function. + + Raises: + google.api.core.RetryError: If the deadline is exceeded while retrying. + ValueError: If the sleep generator stops yielding values. + Exception: If the target raises a method that isn't retryable. + """ + if deadline is not None: + deadline_datetime = ( + datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline)) + else: + deadline_datetime = None + + last_exc = None + + for sleep in sleep_generator: + try: + return target() + + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. 
+ except Exception as exc: + if not predicate(exc): + raise + last_exc = exc + + now = datetime_helpers.utcnow() + if deadline_datetime is not None and deadline_datetime < now: + six.raise_from( + exceptions.RetryError( + 'Deadline of {:.1f}s exceeded while calling {}'.format( + deadline, target), + last_exc), + last_exc) + + _LOGGER.debug('Retrying due to {}, sleeping {:.1f}s ...'.format( + last_exc, sleep)) + time.sleep(sleep) + + raise ValueError('Sleep generator stopped yielding sleep values.') diff --git a/packages/google-cloud-core/tests/unit/api_core/helpers/__init__.py b/packages/google-cloud-core/tests/unit/api_core/helpers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-core/tests/unit/api_core/helpers/test_datetime_helpers.py b/packages/google-cloud-core/tests/unit/api_core/helpers/test_datetime_helpers.py new file mode 100644 index 000000000000..cf1db713b5fa --- /dev/null +++ b/packages/google-cloud-core/tests/unit/api_core/helpers/test_datetime_helpers.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +from google.api.core.helpers import datetime_helpers + + +def test_utcnow(): + result = datetime_helpers.utcnow() + assert isinstance(result, datetime.datetime) diff --git a/packages/google-cloud-core/tests/unit/api_core/test_retry.py b/packages/google-cloud-core/tests/unit/api_core/test_retry.py new file mode 100644 index 000000000000..5ad5612482dc --- /dev/null +++ b/packages/google-cloud-core/tests/unit/api_core/test_retry.py @@ -0,0 +1,129 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
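
Before the unit tests that follow, it may help to see how ``retry_target`` composes with ``if_transient_error``, ``exponential_sleep_generator``, and the new ``RetryError``. The sketch below is illustrative only and assumes nothing beyond what this patch adds; ``sometimes_fails`` and its failure pattern are invented for the example.

.. code-block:: python

    import functools

    from google.api.core import exceptions
    from google.api.core import retry

    attempts = []

    def sometimes_fails(value):
        # Fails twice with a retryable error, then succeeds.
        attempts.append(value)
        if len(attempts) < 3:
            raise exceptions.TooManyRequests('slow down')
        return value * 2

    # ``retry_target`` expects a nullary callable, so bind arguments first.
    target = functools.partial(sometimes_fails, 21)
    sleeps = retry.exponential_sleep_generator(initial=1.0, maximum=10.0)

    try:
        result = retry.retry_target(
            target, retry.if_transient_error, sleeps, deadline=30.0)
    except exceptions.RetryError as exc:
        # Raised only if the deadline elapses; the last underlying error is
        # preserved on the ``cause`` attribute added in this patch.
        print('Retries exhausted:', exc.cause)
    else:
        assert result == 42
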
+ +import datetime +import itertools + +import mock +import pytest + +from google.api.core import exceptions +from google.api.core import retry + + +def test_if_exception_type(): + predicate = retry.if_exception_type(ValueError) + + assert predicate(ValueError()) + assert not predicate(TypeError()) + + +def test_if_exception_type_multiple(): + predicate = retry.if_exception_type(ValueError, TypeError) + + assert predicate(ValueError()) + assert predicate(TypeError()) + assert not predicate(RuntimeError()) + + +def test_if_transient_error(): + assert retry.if_transient_error(exceptions.InternalServerError('')) + assert retry.if_transient_error(exceptions.TooManyRequests('')) + assert not retry.if_transient_error(exceptions.InvalidArgument('')) + + +def test_exponential_sleep_generator_base_2(): + gen = retry.exponential_sleep_generator( + 1, 60, 2, jitter=0.0) + + result = list(itertools.islice(gen, 8)) + assert result == [1, 2, 4, 8, 16, 32, 60, 60] + + +@mock.patch('random.uniform') +def test_exponential_sleep_generator_jitter(uniform): + uniform.return_value = 1 + gen = retry.exponential_sleep_generator( + 1, 60, 2, jitter=2.2) + + result = list(itertools.islice(gen, 7)) + assert result == [1, 3, 7, 15, 31, 60, 60] + uniform.assert_called_with(0.0, 2.2) + + +@mock.patch('time.sleep') +@mock.patch( + 'google.api.core.helpers.datetime_helpers.utcnow', + return_value=datetime.datetime.min) +def test_retry_target_success(utcnow, sleep): + predicate = retry.if_exception_type(ValueError) + call_count = [0] + + def target(): + call_count[0] += 1 + if call_count[0] < 3: + raise ValueError() + return 42 + + result = retry.retry_target(target, predicate, range(10), None) + + assert result == 42 + assert call_count[0] == 3 + sleep.assert_has_calls([mock.call(0), mock.call(1)]) + + +@mock.patch('time.sleep') +@mock.patch( + 'google.api.core.helpers.datetime_helpers.utcnow', + return_value=datetime.datetime.min) +def test_retry_target_non_retryable_error(utcnow, sleep): + predicate = retry.if_exception_type(ValueError) + exception = TypeError() + target = mock.Mock(side_effect=exception) + + with pytest.raises(TypeError) as exc_info: + retry.retry_target(target, predicate, range(10), None) + + assert exc_info.value == exception + sleep.assert_not_called() + + +@mock.patch('time.sleep') +@mock.patch( + 'google.api.core.helpers.datetime_helpers.utcnow') +def test_retry_target_deadline_exceeded(utcnow, sleep): + predicate = retry.if_exception_type(ValueError) + exception = ValueError('meep') + target = mock.Mock(side_effect=exception) + # Setup the timeline so that the first call takes 5 seconds but the second + # call takes 6, which puts the retry over the deadline. + utcnow.side_effect = [ + # The first call to utcnow establishes the start of the timeline. 
+ datetime.datetime.min, + datetime.datetime.min + datetime.timedelta(seconds=5), + datetime.datetime.min + datetime.timedelta(seconds=11)] + + with pytest.raises(exceptions.RetryError) as exc_info: + retry.retry_target(target, predicate, range(10), deadline=10) + + assert exc_info.value.cause == exception + assert exc_info.match('Deadline of 10.0s exceeded') + assert exc_info.match('last exception: meep') + assert target.call_count == 2 + + +def test_retry_target_bad_sleep_generator(): + with pytest.raises(ValueError, match='Sleep generator'): + retry.retry_target( + mock.sentinel.target, mock.sentinel.predicate, [], None) From 71466f8e8f84f966bebfc4c51350791653d07d0e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 17 Aug 2017 15:34:08 -0700 Subject: [PATCH 179/468] Add google.api.core.retry.Retry decorator (#3835) * Add google.api.core.retry.Retry decorator * Add futures dependency * Change jitter algorithm --- .../google/api/core/retry.py | 181 +++++++++++++++++- packages/google-cloud-core/setup.py | 5 + .../tests/unit/api_core/test_retry.py | 129 +++++++++++-- 3 files changed, 287 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-core/google/api/core/retry.py b/packages/google-cloud-core/google/api/core/retry.py index b5a550faa584..fe85ce48cf1b 100644 --- a/packages/google-cloud-core/google/api/core/retry.py +++ b/packages/google-cloud-core/google/api/core/retry.py @@ -12,9 +12,52 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Helpers for retrying functions with exponential back-off.""" +"""Helpers for retrying functions with exponential back-off. + +The :cls:`Retry` decorator can be used to retry functions that raise exceptions +using exponential backoff. Because a exponential sleep algorithm is used, +the retry is limited by a `deadline`. The deadline is the maxmimum amount of +time a method can block. This is used instead of total number of retries +because it is difficult to ascertain the amount of time a function can block +when using total number of retries and exponential backoff. + +By default, this decorator will retry transient +API errors (see :func:`if_transient_error`). For example: + +.. code-block:: python + + @retry.Retry() + def call_flaky_rpc(): + return client.flaky_rpc() + + # Will retry flaky_rpc() if it raises transient API errors. + result = call_flaky_rpc() + +You can pass a custom predicate to retry on different exceptions, such as +waiting for an eventually consistent item to be available: + +.. code-block:: python + + @retry.Retry(predicate=if_exception_type(exceptions.NotFound)) + def check_if_exists(): + return client.does_thing_exist() + + is_available = check_if_exists() + +Some client library methods apply retry automatically. These methods can accept +a ``retry`` parameter that allows you to configure the behavior: + +.. 
code-block:: python + + my_retry = retry.Retry(deadline=60) + result = client.some_method(retry=my_retry) + +""" + +from __future__ import unicode_literals import datetime +import functools import logging import random import time @@ -25,7 +68,10 @@ from google.api.core.helpers import datetime_helpers _LOGGER = logging.getLogger(__name__) -_DEFAULT_MAX_JITTER = 0.2 +_DEFAULT_INITIAL_DELAY = 1.0 +_DEFAULT_MAXIMUM_DELAY = 60.0 +_DEFAULT_DELAY_MULTIPLIER = 2.0 +_DEFAULT_DEADLINE = 60.0 * 2.0 def if_exception_type(*exception_types): @@ -38,10 +84,10 @@ def if_exception_type(*exception_types): Callable[Exception]: A predicate that returns True if the provided exception is of the given type(s). """ - def inner(exception): + def if_exception_type_predicate(exception): """Bound predicate for checking an exception type.""" return isinstance(exception, exception_types) - return inner + return if_exception_type_predicate # pylint: disable=invalid-name @@ -64,7 +110,7 @@ def inner(exception): def exponential_sleep_generator( - initial, maximum, multiplier=2, jitter=_DEFAULT_MAX_JITTER): + initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER): """Generates sleep intervals based on the exponential back-off algorithm. This implements the `Truncated Exponential Back-off`_ algorithm. @@ -77,16 +123,16 @@ def exponential_sleep_generator( be greater than 0. maximum (float): The maximum about of time to delay. multiplier (float): The multiplier applied to the delay. - jitter (float): The maximum about of randomness to apply to the delay. Yields: float: successive sleep intervals. """ delay = initial while True: - yield delay - delay = min( - delay * multiplier + random.uniform(0, jitter), maximum) + # Introduce jitter by yielding a delay that is uniformly distributed + # to average out to the delay time. + yield min(random.uniform(0.0, delay * 2.0), maximum) + delay = delay * multiplier def retry_target(target, predicate, sleep_generator, deadline): @@ -146,3 +192,120 @@ def retry_target(target, predicate, sleep_generator, deadline): time.sleep(sleep) raise ValueError('Sleep generator stopped yielding sleep values.') + + +@six.python_2_unicode_compatible +class Retry(object): + """Exponential retry decorator. + + This class is a decorator used to add exponential back-off retry behavior + to an RPC call. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum about of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum about of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + deadline (float): How long to keep retrying in seconds. + """ + def __init__( + self, + predicate=if_transient_error, + initial=_DEFAULT_INITIAL_DELAY, + maximum=_DEFAULT_MAXIMUM_DELAY, + multiplier=_DEFAULT_DELAY_MULTIPLIER, + deadline=_DEFAULT_DEADLINE): + self._predicate = predicate + self._initial = initial + self._multiplier = multiplier + self._maximum = maximum + self._deadline = deadline + + def __call__(self, func): + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable to add retry behavior to. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
+ """ + @six.wraps(func) + def retry_wrapped_func(*args, **kwargs): + """A wrapper that calls target function with retry.""" + target = functools.partial(func, *args, **kwargs) + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier) + return retry_target( + target, + self._predicate, + sleep_generator, + self._deadline) + + return retry_wrapped_func + + def with_deadline(self, deadline): + """Return a copy of this retry with the given deadline. + + Args: + deadline (float): How long to keep retrying. + + Returns: + Retry: A new retry instance with the given deadline. + """ + return Retry( + predicate=self._predicate, + initial=self._initial, + maximum=self._maximum, + multiplier=self._multiplier, + deadline=deadline) + + def with_predicate(self, predicate): + """Return a copy of this retry with the given predicate. + + Args: + predicate (Callable[Exception]): A callable that should return + ``True`` if the given exception is retryable. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return Retry( + predicate=predicate, + initial=self._initial, + maximum=self._maximum, + multiplier=self._multiplier, + deadline=self._deadline) + + def with_delay( + self, initial=None, maximum=None, multiplier=None): + """Return a copy of this retry with the given delay options. + + Args: + initial (float): The minimum about of time to delay. This must + be greater than 0. + maximum (float): The maximum about of time to delay. + multiplier (float): The multiplier applied to the delay. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return Retry( + predicate=self._predicate, + initial=initial if initial is not None else self._initial, + maximum=maximum if maximum is not None else self._maximum, + multiplier=multiplier if maximum is not None else self._multiplier, + deadline=self._deadline) + + def __str__(self): + return ( + ''.format( + self._predicate, self._initial, self._maximum, + self._multiplier, self._deadline)) diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index 96d7567b9de6..c45f7dd24ac2 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -60,6 +60,10 @@ 'tenacity >= 4.0.0, <5.0.0dev' ] +EXTRAS_REQUIREMENTS = { + ':python_version<"3.2"': ['futures >= 3.0.0'], +} + setup( name='google-cloud-core', version='0.26.0', @@ -72,5 +76,6 @@ ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, + extras_require=EXTRAS_REQUIREMENTS, **SETUP_BASE ) diff --git a/packages/google-cloud-core/tests/unit/api_core/test_retry.py b/packages/google-cloud-core/tests/unit/api_core/test_retry.py index 5ad5612482dc..71569137b94f 100644 --- a/packages/google-cloud-core/tests/unit/api_core/test_retry.py +++ b/packages/google-cloud-core/tests/unit/api_core/test_retry.py @@ -14,6 +14,7 @@ import datetime import itertools +import re import mock import pytest @@ -43,29 +44,22 @@ def test_if_transient_error(): assert not retry.if_transient_error(exceptions.InvalidArgument('')) -def test_exponential_sleep_generator_base_2(): +# Make uniform return half of its maximum, which will be the calculated +# sleep time. 
+@mock.patch('random.uniform', autospec=True, side_effect=lambda m, n: n/2.0) +def test_exponential_sleep_generator_base_2(uniform): gen = retry.exponential_sleep_generator( - 1, 60, 2, jitter=0.0) + 1, 60, multiplier=2) result = list(itertools.islice(gen, 8)) assert result == [1, 2, 4, 8, 16, 32, 60, 60] -@mock.patch('random.uniform') -def test_exponential_sleep_generator_jitter(uniform): - uniform.return_value = 1 - gen = retry.exponential_sleep_generator( - 1, 60, 2, jitter=2.2) - - result = list(itertools.islice(gen, 7)) - assert result == [1, 3, 7, 15, 31, 60, 60] - uniform.assert_called_with(0.0, 2.2) - - -@mock.patch('time.sleep') +@mock.patch('time.sleep', autospec=True) @mock.patch( 'google.api.core.helpers.datetime_helpers.utcnow', - return_value=datetime.datetime.min) + return_value=datetime.datetime.min, + autospec=True) def test_retry_target_success(utcnow, sleep): predicate = retry.if_exception_type(ValueError) call_count = [0] @@ -83,10 +77,11 @@ def target(): sleep.assert_has_calls([mock.call(0), mock.call(1)]) -@mock.patch('time.sleep') +@mock.patch('time.sleep', autospec=True) @mock.patch( 'google.api.core.helpers.datetime_helpers.utcnow', - return_value=datetime.datetime.min) + return_value=datetime.datetime.min, + autospec=True) def test_retry_target_non_retryable_error(utcnow, sleep): predicate = retry.if_exception_type(ValueError) exception = TypeError() @@ -99,9 +94,9 @@ def test_retry_target_non_retryable_error(utcnow, sleep): sleep.assert_not_called() -@mock.patch('time.sleep') +@mock.patch('time.sleep', autospec=True) @mock.patch( - 'google.api.core.helpers.datetime_helpers.utcnow') + 'google.api.core.helpers.datetime_helpers.utcnow', autospec=True) def test_retry_target_deadline_exceeded(utcnow, sleep): predicate = retry.if_exception_type(ValueError) exception = ValueError('meep') @@ -127,3 +122,99 @@ def test_retry_target_bad_sleep_generator(): with pytest.raises(ValueError, match='Sleep generator'): retry.retry_target( mock.sentinel.target, mock.sentinel.predicate, [], None) + + +class TestRetry(object): + def test_constructor_defaults(self): + retry_ = retry.Retry() + assert retry_._predicate == retry.if_transient_error + assert retry_._initial == 1 + assert retry_._maximum == 60 + assert retry_._multiplier == 2 + assert retry_._deadline == 120 + + def test_constructor_options(self): + retry_ = retry.Retry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4) + assert retry_._predicate == mock.sentinel.predicate + assert retry_._initial == 1 + assert retry_._maximum == 2 + assert retry_._multiplier == 3 + assert retry_._deadline == 4 + + def test_with_deadline(self): + retry_ = retry.Retry() + new_retry = retry_.with_deadline(42) + assert retry_ is not new_retry + assert new_retry._deadline == 42 + + def test_with_predicate(self): + retry_ = retry.Retry() + new_retry = retry_.with_predicate(mock.sentinel.predicate) + assert retry_ is not new_retry + assert new_retry._predicate == mock.sentinel.predicate + + def test_with_delay_noop(self): + retry_ = retry.Retry() + new_retry = retry_.with_delay() + assert retry_ is not new_retry + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + + def test_with_delay(self): + retry_ = retry.Retry() + new_retry = retry_.with_delay( + initial=1, maximum=2, multiplier=3) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 2 + assert 
new_retry._multiplier == 3 + + def test___str__(self): + retry_ = retry.Retry() + assert re.match(( + r', ' + r'initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0>'), + str(retry_)) + + @mock.patch('time.sleep', autospec=True) + def test___call___and_execute_success(self, sleep): + retry_ = retry.Retry() + target = mock.Mock(spec=['__call__'], return_value=42) + # __name__ is needed by functools.partial. + target.__name__ = 'target' + + decorated = retry_(target) + target.assert_not_called() + + result = decorated('meep') + + assert result == 42 + target.assert_called_once_with('meep') + sleep.assert_not_called() + + # Make uniform return half of its maximum, which will be the calculated + # sleep time. + @mock.patch( + 'random.uniform', autospec=True, side_effect=lambda m, n: n/2.0) + @mock.patch('time.sleep', autospec=True) + def test___call___and_execute_retry(self, sleep, uniform): + retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) + target = mock.Mock(spec=['__call__'], side_effect=[ValueError(), 42]) + # __name__ is needed by functools.partial. + target.__name__ = 'target' + + decorated = retry_(target) + target.assert_not_called() + + result = decorated('meep') + + assert result == 42 + assert target.call_count == 2 + target.assert_has_calls([mock.call('meep'), mock.call('meep')]) + sleep.assert_called_once_with(retry_._initial) From 31a9b6b43df92bdc2782f942a2c42b7c1045dab5 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 18 Aug 2017 09:01:30 -0700 Subject: [PATCH 180/468] Drop tenacity dependency; use google.api.core.retry in google.api.core.future (#3837) --- .../google/api/core/future/polling.py | 46 ++++++++----------- packages/google-cloud-core/setup.py | 1 - 2 files changed, 20 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-core/google/api/core/future/polling.py b/packages/google-cloud-core/google/api/core/future/polling.py index 40380d6ad938..9e3d07e7128f 100644 --- a/packages/google-cloud-core/google/api/core/future/polling.py +++ b/packages/google-cloud-core/google/api/core/future/polling.py @@ -16,16 +16,18 @@ import abc import concurrent.futures -import functools -import operator - -import six -import tenacity +from google.api.core import exceptions +from google.api.core import retry from google.api.core.future import _helpers from google.api.core.future import base +class _OperationNotComplete(Exception): + """Private exception used for polling via retry.""" + pass + + class PollingFuture(base.Future): """A Future that needs to poll some service to check its status. @@ -55,6 +57,11 @@ def done(self): # pylint: disable=redundant-returns-doc, missing-raises-doc raise NotImplementedError() + def _done_or_raise(self): + """Check if the future is done and raise if it's not.""" + if not self.done(): + raise _OperationNotComplete() + def running(self): """True if the operation is currently running.""" return not self.done() @@ -69,29 +76,16 @@ def _blocking_poll(self, timeout=None): if self._result_set: return - retry_on = tenacity.retry_if_result( - functools.partial(operator.is_not, True)) - # Use exponential backoff with jitter. 
- wait_on = ( - tenacity.wait_exponential(multiplier=1, max=10) + - tenacity.wait_random(0, 1)) - - if timeout is None: - retry = tenacity.retry(retry=retry_on, wait=wait_on) - else: - retry = tenacity.retry( - retry=retry_on, - wait=wait_on, - stop=tenacity.stop_after_delay(timeout)) + retry_ = retry.Retry( + predicate=retry.if_exception_type(_OperationNotComplete), + deadline=timeout) try: - retry(self.done)() - except tenacity.RetryError as exc: - six.raise_from( - concurrent.futures.TimeoutError( - 'Operation did not complete within the designated ' - 'timeout.'), - exc) + retry_(self._done_or_raise)() + except exceptions.RetryError: + raise concurrent.futures.TimeoutError( + 'Operation did not complete within the designated ' + 'timeout.') def result(self, timeout=None): """Get the result of the operation, blocking if necessary. diff --git a/packages/google-cloud-core/setup.py b/packages/google-cloud-core/setup.py index c45f7dd24ac2..6adacb0e6c1b 100644 --- a/packages/google-cloud-core/setup.py +++ b/packages/google-cloud-core/setup.py @@ -57,7 +57,6 @@ 'requests >= 2.18.0, < 3.0.0dev', 'setuptools >= 34.0.0', 'six', - 'tenacity >= 4.0.0, <5.0.0dev' ] EXTRAS_REQUIREMENTS = { From f2aa180ff264fd5c68c99f3935002a4dc2c81bd7 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 18 Aug 2017 13:40:31 -0700 Subject: [PATCH 181/468] Add google.api.page_iterator.GRPCIterator (#3843) --- .../google/api/core/page_iterator.py | 87 +++++++++++++++++++ .../tests/unit/api_core/test_page_iterator.py | 84 ++++++++++++++++++ 2 files changed, 171 insertions(+) diff --git a/packages/google-cloud-core/google/api/core/page_iterator.py b/packages/google-cloud-core/google/api/core/page_iterator.py index 147c9f47e35a..23c469f9bc1d 100644 --- a/packages/google-cloud-core/google/api/core/page_iterator.py +++ b/packages/google-cloud-core/google/api/core/page_iterator.py @@ -423,3 +423,90 @@ def _next_page(self): return page except StopIteration: return None + + +class GRPCIterator(Iterator): + """A generic class for iterating through gRPC list responses. + + .. note:: The class does not take a ``page_token`` argument because it can + just be specified in the ``request``. + + Args: + client (google.cloud.client.Client): The API client. This unused by + this class, but kept to satisfy the :class:`Iterator` interface. + method (Callable[protobuf.Message]): A bound gRPC method that should + take a single message for the request. + request (protobuf.Message): The request message. + items_field (str): The field in the response message that has the + items for the page. + item_to_value (Callable[Iterator, Any]): Callable to convert an item + from the type in the JSON response into a native object. Will + be called with the iterator and a single item. + request_token_field (str): The field in the request message used to + specify the page token. + response_token_field (str): The field in the response message that has + the token for the next page. + max_results (int): The maximum number of results to fetch. + + .. 
From 182e5bde0e651592ce0e490591b83bf32dd3a068 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Mon, 21 Aug 2017 12:11:12 -0700
Subject: [PATCH 182/468] Allow fetching more than the first page when max_results is set. (#3845)

* BigQuery: reproduce error fetching multiple results with DB-API.
  Add a system test to call `fetchall()` when multiple rows are expected.
* BigQuery: system test to reproduce error of only fetching first page.
  This error applies to all BigQuery iterators, not just DB-API.
* BigQuery: allow arraysize to be set after execute()
  It was allowed before, but it didn't result in the correct behavior.
* max_results in BigQuery API had a different meaning from HTTPIterator.
  In BigQuery it means the page size, but in the HTTPIterator it meant
  "don't fetch any more pages once you have these many rows."
* Fix lint errors
---
 .../google-cloud-core/google/api/core/page_iterator.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/packages/google-cloud-core/google/api/core/page_iterator.py b/packages/google-cloud-core/google/api/core/page_iterator.py
index 23c469f9bc1d..3a38c100cd95 100644
--- a/packages/google-cloud-core/google/api/core/page_iterator.py
+++ b/packages/google-cloud-core/google/api/core/page_iterator.py
@@ -275,6 +275,8 @@ class HTTPIterator(Iterator):
             signature takes the :class:`Iterator` that started the page,
             the :class:`Page` that was started and the dictionary containing
             the page response.
+        next_token (str): The name of the field used in the response for page
+            tokens.
 
     .. autoattribute:: pages
     """
@@ -283,13 +285,13 @@ class HTTPIterator(Iterator):
     _PAGE_TOKEN = 'pageToken'
     _MAX_RESULTS = 'maxResults'
     _NEXT_TOKEN = 'nextPageToken'
-    _RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS])
+    _RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
     _HTTP_METHOD = 'GET'
 
     def __init__(self, client, api_request, path, item_to_value,
                  items_key=_DEFAULT_ITEMS_KEY,
                  page_token=None, max_results=None, extra_params=None,
-                 page_start=_do_nothing_page_start):
+                 page_start=_do_nothing_page_start, next_token=_NEXT_TOKEN):
         super(HTTPIterator, self).__init__(
             client, item_to_value, page_token=page_token,
             max_results=max_results)
@@ -298,6 +300,7 @@ def __init__(self, client, api_request, path, item_to_value,
         self._items_key = items_key
         self.extra_params = extra_params
         self._page_start = page_start
+        self._next_token = next_token
         # Verify inputs / provide defaults.
         if self.extra_params is None:
             self.extra_params = {}
@@ -327,7 +330,7 @@ def _next_page(self):
             items = response.get(self._items_key, ())
             page = Page(self, items, self._item_to_value)
             self._page_start(self, page, response)
-            self.next_page_token = response.get(self._NEXT_TOKEN)
+            self.next_page_token = response.get(self._next_token)
             return page
         else:
             return None
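
The new ``next_token`` hook is easiest to see with a fake ``api_request`` callable. The sketch below assumes the HTTPIterator hands the query string to ``api_request`` as the ``query_params`` keyword, as in google-cloud-core's HTTP iterator; ``fake_api_request`` and the ``tokenOfNextPage`` key are made-up stand-ins for an API whose list responses do not use the default ``nextPageToken`` field.

from google.api.core import page_iterator

_PAGES = [
    {'items': [1, 2], 'tokenOfNextPage': 'abc'},
    {'items': [3]},  # no token key: this is the last page
]

def fake_api_request(**kwargs):
    # Stand-in for a client's HTTP helper; serves the second page once the
    # iterator echoes the page token back in the query parameters.
    params = kwargs.get('query_params') or {}
    return _PAGES[1] if params.get('pageToken') else _PAGES[0]

iterator = page_iterator.HTTPIterator(
    client=None,
    api_request=fake_api_request,
    path='/widgets',
    item_to_value=lambda _iterator, item: item,
    next_token='tokenOfNextPage')  # response field name overridden per this patch

assert list(iterator) == [1, 2, 3]
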
From c4b321f6acbaf73a413d7eff2327d07392dae3c6 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Wed, 23 Aug 2017 14:29:48 -0700
Subject: [PATCH 183/468] Add google.api.core.path_template (#3851)

---
 .../google/api/core/path_template.py          | 198 ++++++++++++++++++
 .../tests/unit/api_core/test_path_template.py |  90 ++++++++
 2 files changed, 288 insertions(+)
 create mode 100644 packages/google-cloud-core/google/api/core/path_template.py
 create mode 100644 packages/google-cloud-core/tests/unit/api_core/test_path_template.py

diff --git a/packages/google-cloud-core/google/api/core/path_template.py b/packages/google-cloud-core/google/api/core/path_template.py
new file mode 100644
index 000000000000..e1cfae360950
--- /dev/null
+++ b/packages/google-cloud-core/google/api/core/path_template.py
@@ -0,0 +1,198 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Expand and validate URL path templates.
+
+This module provides the :func:`expand` and :func:`validate` functions for
+interacting with Google-style URL `path templates`_ which are commonly used
+in Google APIs for `resource names`_.
+
+.. _path templates: https://github.com/googleapis/googleapis/blob
+    /57e2d376ac7ef48681554204a3ba78a414f2c533/google/api/http.proto#L212
+.. _resource names: https://cloud.google.com/apis/design/resource_names
+"""
+
+from __future__ import unicode_literals
+
+import functools
+import re
+
+import six
+
+# Regular expression for extracting variable parts from a path template.
+# The variables can be expressed as:
+#
+# - "*": a single-segment positional variable, for example: "books/*"
+# - "**": a multi-segment positional variable, for example: "shelf/**/book/*"
+# - "{name}": a single-segment wildcard named variable, for example
+#   "books/{name}"
+# - "{name=*}: same as above.
+# - "{name=**}": a multi-segment wildcard named variable, for example
+#   "shelf/{name=**}"
+# - "{name=/path/*/**}": a multi-segment named variable with a sub-template.
+_VARIABLE_RE = re.compile(r"""
+    (  # Capture the entire variable expression
+        (?P<positional>\*\*?)  # Match & capture * and ** positional variables.
+        |
+        # Match & capture named variables {name}
+        {
+            (?P<name>[^/]+?)
+            # Optionally match and capture the named variable's template.
+            (?:=(?P